diff --git a/.nojekyll b/.nojekyll new file mode 100644 index 00000000000..e69de29bb2d diff --git a/404.html b/404.html new file mode 100644 index 00000000000..9f9ba54c757 --- /dev/null +++ b/404.html @@ -0,0 +1,17 @@ + + + + + +Page Not Found | Platformatic Open Source Software + + + + + +
+
Skip to main content

Page Not Found

We could not find what you were looking for.

Please contact the owner of the site that linked you to the original URL and let them know their link is broken.

+ + + + \ No newline at end of file diff --git a/CNAME b/CNAME new file mode 100644 index 00000000000..92d423a0d7f --- /dev/null +++ b/CNAME @@ -0,0 +1 @@ +docs.platformatic.dev \ No newline at end of file diff --git a/assets/css/styles.758417ba.css b/assets/css/styles.758417ba.css new file mode 100644 index 00000000000..71bcc9ba614 --- /dev/null +++ b/assets/css/styles.758417ba.css @@ -0,0 +1 @@ +.col,.container{padding:0 var(--ifm-spacing-horizontal);width:100%}.markdown>h2,.markdown>h3,.markdown>h4,.markdown>h5,.markdown>h6{margin-bottom:calc(var(--ifm-heading-vertical-rhythm-bottom)*var(--ifm-leading))}.markdown li,body{word-wrap:break-word}body,ol ol,ol ul,ul ol,ul ul{margin:0}pre,table{overflow:auto}blockquote,pre{margin:0 0 var(--ifm-spacing-vertical)}.breadcrumbs__link,.button{transition-timing-function:var(--ifm-transition-timing-default)}.aa-Footer,.button{-webkit-user-select:none}.button,code{vertical-align:middle}.button--outline.button--active,.button--outline:active,.button--outline:hover,:root{--ifm-button-color:var(--ifm-font-color-base-inverse)}.menu__link:hover,a{transition:color var(--ifm-transition-fast) var(--ifm-transition-timing-default)}.navbar--dark,:root{--ifm-navbar-link-hover-color:var(--ifm-color-primary)}.menu,.navbar-sidebar{overflow-x:hidden}:root,html[data-theme=dark]{--ifm-color-emphasis-500:var(--ifm-color-gray-500)}[data-theme=dark],[data-theme=light]{--docusaurus-highlighted-code-line-bg:#0000004d}.aa-DetachedSearchButton,.aa-Footer{--key-shadow:inset 0 -2px 0 0 #cdcde6,inset 0 0 1px 1px #fff,0 1px 2px 1px #1e235a66;--key-gradient:linear-gradient(-225deg,#d5dbe4,#f8f8f8)}.toggleButton_gllP,html{-webkit-tap-highlight-color:transparent}:root,body.dark,body[data-theme=dark]{--aa-icon-color-rgb:119,119,163;--aa-scrollbar-thumb-background-color-rgb:var(--aa-background-color-rgb)}.aa-DetachedSearchButtonQuery,.aa-ItemContentTitle,.button,.dropdown__link,.text--truncate,a.aa-FooterSearchCredit 
span{white-space:nowrap}.aa-List,.clean-list,.containsTaskList_mC6p,.details_lb9f>summary,.dropdown__menu,.menu__list,ul.aa-FooterCommands{list-style:none}:root{--ifm-color-scheme:light;--ifm-dark-value:10%;--ifm-darker-value:15%;--ifm-darkest-value:30%;--ifm-light-value:15%;--ifm-lighter-value:30%;--ifm-lightest-value:50%;--ifm-contrast-background-value:90%;--ifm-contrast-foreground-value:70%;--ifm-contrast-background-dark-value:70%;--ifm-contrast-foreground-dark-value:90%;--ifm-color-primary:#3578e5;--ifm-color-secondary:#ebedf0;--ifm-color-success:#00a400;--ifm-color-info:#54c7ec;--ifm-color-warning:#ffba00;--ifm-color-danger:#fa383e;--ifm-color-primary-dark:#306cce;--ifm-color-primary-darker:#2d66c3;--ifm-color-primary-darkest:#2554a0;--ifm-color-primary-light:#538ce9;--ifm-color-primary-lighter:#72a1ed;--ifm-color-primary-lightest:#9abcf2;--ifm-color-primary-contrast-background:#ebf2fc;--ifm-color-primary-contrast-foreground:#102445;--ifm-color-secondary-dark:#d4d5d8;--ifm-color-secondary-darker:#c8c9cc;--ifm-color-secondary-darkest:#a4a6a8;--ifm-color-secondary-light:#eef0f2;--ifm-color-secondary-lighter:#f1f2f5;--ifm-color-secondary-lightest:#f5f6f8;--ifm-color-secondary-contrast-background:#fdfdfe;--ifm-color-secondary-contrast-foreground:#474748;--ifm-color-success-dark:#009400;--ifm-color-success-darker:#008b00;--ifm-color-success-darkest:#007300;--ifm-color-success-light:#26b226;--ifm-color-success-lighter:#4dbf4d;--ifm-color-success-lightest:#80d280;--ifm-color-success-contrast-background:#e6f6e6;--ifm-color-success-contrast-foreground:#003100;--ifm-color-info-dark:#4cb3d4;--ifm-color-info-darker:#47a9c9;--ifm-color-info-darkest:#3b8ba5;--ifm-color-info-light:#6ecfef;--ifm-color-info-lighter:#87d8f2;--ifm-color-info-lightest:#aae3f6;--ifm-color-info-contrast-background:#eef9fd;--ifm-color-info-contrast-foreground:#193c47;--ifm-color-warning-dark:#e6a700;--ifm-color-warning-darker:#d99e00;--ifm-color-warning-darkest:#b38200;--ifm-color-warning-light:#ffc4
26;--ifm-color-warning-lighter:#ffcf4d;--ifm-color-warning-lightest:#ffdd80;--ifm-color-warning-contrast-background:#fff8e6;--ifm-color-warning-contrast-foreground:#4d3800;--ifm-color-danger-dark:#e13238;--ifm-color-danger-darker:#d53035;--ifm-color-danger-darkest:#af272b;--ifm-color-danger-light:#fb565b;--ifm-color-danger-lighter:#fb7478;--ifm-color-danger-lightest:#fd9c9f;--ifm-color-danger-contrast-background:#ffebec;--ifm-color-danger-contrast-foreground:#4b1113;--ifm-color-white:#fff;--ifm-color-black:#000;--ifm-color-gray-0:var(--ifm-color-white);--ifm-color-gray-100:#f5f6f7;--ifm-color-gray-200:#ebedf0;--ifm-color-gray-300:#dadde1;--ifm-color-gray-400:#ccd0d5;--ifm-color-gray-500:#bec3c9;--ifm-color-gray-600:#8d949e;--ifm-color-gray-700:#606770;--ifm-color-gray-800:#444950;--ifm-color-gray-900:#1c1e21;--ifm-color-gray-1000:var(--ifm-color-black);--ifm-color-emphasis-0:var(--ifm-color-gray-0);--ifm-color-emphasis-100:var(--ifm-color-gray-100);--ifm-color-emphasis-200:var(--ifm-color-gray-200);--ifm-color-emphasis-300:var(--ifm-color-gray-300);--ifm-color-emphasis-400:var(--ifm-color-gray-400);--ifm-color-emphasis-600:var(--ifm-color-gray-600);--ifm-color-emphasis-700:var(--ifm-color-gray-700);--ifm-color-emphasis-800:var(--ifm-color-gray-800);--ifm-color-emphasis-900:var(--ifm-color-gray-900);--ifm-color-emphasis-1000:var(--ifm-color-gray-1000);--ifm-color-content:var(--ifm-color-emphasis-900);--ifm-color-content-inverse:var(--ifm-color-emphasis-0);--ifm-color-content-secondary:#525860;--ifm-background-surface-color:var(--ifm-color-content-inverse);--ifm-global-border-width:1px;--ifm-global-radius:0.4rem;--ifm-hover-overlay:#0000000d;--ifm-font-color-base:var(--ifm-color-content);--ifm-font-color-base-inverse:var(--ifm-color-content-inverse);--ifm-font-color-secondary:var(--ifm-color-content-secondary);--ifm-font-family-base:system-ui,-apple-system,Segoe UI,Roboto,Ubuntu,Cantarell,Noto Sans,sans-serif,BlinkMacSystemFont,"Segoe 
UI",Helvetica,Arial,sans-serif,"Apple Color Emoji","Segoe UI Emoji","Segoe UI Symbol";--ifm-font-family-monospace:SFMono-Regular,Menlo,Monaco,Consolas,"Liberation Mono","Courier New",monospace;--ifm-font-size-base:100%;--ifm-font-weight-light:300;--ifm-font-weight-normal:400;--ifm-font-weight-semibold:500;--ifm-font-weight-bold:700;--ifm-font-weight-base:var(--ifm-font-weight-normal);--ifm-line-height-base:1.65;--ifm-global-spacing:1rem;--ifm-spacing-vertical:var(--ifm-global-spacing);--ifm-spacing-horizontal:var(--ifm-global-spacing);--ifm-transition-fast:200ms;--ifm-transition-slow:400ms;--ifm-transition-timing-default:cubic-bezier(0.08,0.52,0.52,1);--ifm-global-shadow-lw:0 1px 2px 0 #0000001a;--ifm-global-shadow-md:0 5px 40px #0003;--ifm-global-shadow-tl:0 12px 28px 0 #0003,0 2px 4px 0 #0000001a;--ifm-z-index-dropdown:100;--ifm-z-index-fixed:200;--ifm-z-index-overlay:400;--ifm-container-width:1140px;--ifm-container-width-xl:1320px;--ifm-code-background:#f6f7f8;--ifm-code-border-radius:var(--ifm-global-radius);--ifm-code-font-size:90%;--ifm-code-padding-horizontal:0.1rem;--ifm-code-padding-vertical:0.1rem;--ifm-pre-background:var(--ifm-code-background);--ifm-pre-border-radius:var(--ifm-code-border-radius);--ifm-pre-color:inherit;--ifm-pre-line-height:1.45;--ifm-pre-padding:1rem;--ifm-heading-color:inherit;--ifm-heading-margin-top:0;--ifm-heading-margin-bottom:var(--ifm-spacing-vertical);--ifm-heading-font-family:var(--ifm-font-family-base);--ifm-heading-font-weight:var(--ifm-font-weight-bold);--ifm-heading-line-height:1.25;--ifm-h1-font-size:2rem;--ifm-h2-font-size:1.5rem;--ifm-h3-font-size:1.25rem;--ifm-h4-font-size:1rem;--ifm-h5-font-size:0.875rem;--ifm-h6-font-size:0.85rem;--ifm-image-alignment-padding:1.25rem;--ifm-leading-desktop:1.25;--ifm-leading:calc(var(--ifm-leading-desktop)*1rem);--ifm-list-left-padding:2rem;--ifm-list-margin:1rem;--ifm-list-item-margin:0.25rem;--ifm-list-paragraph-margin:1rem;--ifm-table-cell-padding:0.75rem;--ifm-table-background:#000
0;--ifm-table-stripe-background:#00000008;--ifm-table-border-width:1px;--ifm-table-border-color:var(--ifm-color-emphasis-300);--ifm-table-head-background:inherit;--ifm-table-head-color:inherit;--ifm-table-head-font-weight:var(--ifm-font-weight-bold);--ifm-table-cell-color:inherit;--ifm-link-color:var(--ifm-color-primary);--ifm-link-decoration:none;--ifm-link-hover-color:var(--ifm-link-color);--ifm-link-hover-decoration:underline;--ifm-paragraph-margin-bottom:var(--ifm-leading);--ifm-blockquote-font-size:var(--ifm-font-size-base);--ifm-blockquote-border-left-width:2px;--ifm-blockquote-padding-horizontal:var(--ifm-spacing-horizontal);--ifm-blockquote-padding-vertical:0;--ifm-blockquote-shadow:none;--ifm-blockquote-color:var(--ifm-color-emphasis-800);--ifm-blockquote-border-color:var(--ifm-color-emphasis-300);--ifm-hr-background-color:var(--ifm-color-emphasis-500);--ifm-hr-height:1px;--ifm-hr-margin-vertical:1.5rem;--ifm-scrollbar-size:7px;--ifm-scrollbar-track-background-color:#f1f1f1;--ifm-scrollbar-thumb-background-color:silver;--ifm-scrollbar-thumb-hover-background-color:#a7a7a7;--ifm-alert-background-color:inherit;--ifm-alert-border-color:inherit;--ifm-alert-border-radius:var(--ifm-global-radius);--ifm-alert-border-width:0px;--ifm-alert-border-left-width:5px;--ifm-alert-color:var(--ifm-font-color-base);--ifm-alert-padding-horizontal:var(--ifm-spacing-horizontal);--ifm-alert-padding-vertical:var(--ifm-spacing-vertical);--ifm-alert-shadow:var(--ifm-global-shadow-lw);--ifm-avatar-intro-margin:1rem;--ifm-avatar-intro-alignment:inherit;--ifm-avatar-photo-size:3rem;--ifm-badge-background-color:inherit;--ifm-badge-border-color:inherit;--ifm-badge-border-radius:var(--ifm-global-radius);--ifm-badge-border-width:var(--ifm-global-border-width);--ifm-badge-color:var(--ifm-color-white);--ifm-badge-padding-horizontal:calc(var(--ifm-spacing-horizontal)*0.5);--ifm-badge-padding-vertical:calc(var(--ifm-spacing-vertical)*0.25);--ifm-breadcrumb-border-radius:1.5rem;--ifm-breadcrumb-
spacing:0.5rem;--ifm-breadcrumb-color-active:var(--ifm-color-primary);--ifm-breadcrumb-item-background-active:var(--ifm-hover-overlay);--ifm-breadcrumb-padding-horizontal:0.8rem;--ifm-breadcrumb-padding-vertical:0.4rem;--ifm-breadcrumb-size-multiplier:1;--ifm-breadcrumb-separator:url('data:image/svg+xml;utf8,');--ifm-breadcrumb-separator-filter:none;--ifm-breadcrumb-separator-size:0.5rem;--ifm-breadcrumb-separator-size-multiplier:1.25;--ifm-button-background-color:inherit;--ifm-button-border-color:var(--ifm-button-background-color);--ifm-button-border-width:var(--ifm-global-border-width);--ifm-button-font-weight:var(--ifm-font-weight-bold);--ifm-button-padding-horizontal:1.5rem;--ifm-button-padding-vertical:0.375rem;--ifm-button-size-multiplier:1;--ifm-button-transition-duration:var(--ifm-transition-fast);--ifm-button-border-radius:calc(var(--ifm-global-radius)*var(--ifm-button-size-multiplier));--ifm-button-group-spacing:2px;--ifm-card-background-color:var(--ifm-background-surface-color);--ifm-card-border-radius:calc(var(--ifm-global-radius)*2);--ifm-card-horizontal-spacing:var(--ifm-global-spacing);--ifm-card-vertical-spacing:var(--ifm-global-spacing);--ifm-toc-border-color:var(--ifm-color-emphasis-300);--ifm-toc-link-color:var(--ifm-color-content-secondary);--ifm-toc-padding-vertical:0.5rem;--ifm-toc-padding-horizontal:0.5rem;--ifm-dropdown-background-color:var(--ifm-background-surface-color);--ifm-dropdown-font-weight:var(--ifm-font-weight-semibold);--ifm-dropdown-link-color:var(--ifm-font-color-base);--ifm-dropdown-hover-background-color:var(--ifm-hover-overlay);--ifm-footer-background-color:var(--ifm-color-emphasis-100);--ifm-footer-color:inherit;--ifm-footer-link-color:var(--ifm-color-emphasis-700);--ifm-footer-link-hover-color:var(--ifm-color-primary);--ifm-footer-link-horizontal-spacing:0.5rem;--ifm-footer-padding-horizontal:calc(var(--ifm-spacing-horizontal)*2);--ifm-footer-padding-vertical:calc(var(--ifm-spacing-vertical)*2);--ifm-footer-title-color:inher
it;--ifm-footer-logo-max-width:min(30rem,90vw);--ifm-hero-background-color:var(--ifm-background-surface-color);--ifm-hero-text-color:var(--ifm-color-emphasis-800);--ifm-menu-color:var(--ifm-color-emphasis-700);--ifm-menu-color-active:var(--ifm-color-primary);--ifm-menu-color-background-active:var(--ifm-hover-overlay);--ifm-menu-color-background-hover:var(--ifm-hover-overlay);--ifm-menu-link-padding-horizontal:0.75rem;--ifm-menu-link-padding-vertical:0.375rem;--ifm-menu-link-sublist-icon:url('data:image/svg+xml;utf8,');--ifm-menu-link-sublist-icon-filter:none;--ifm-navbar-background-color:var(--ifm-background-surface-color);--ifm-navbar-height:3.75rem;--ifm-navbar-item-padding-horizontal:0.75rem;--ifm-navbar-item-padding-vertical:0.25rem;--ifm-navbar-link-color:var(--ifm-font-color-base);--ifm-navbar-link-active-color:var(--ifm-link-color);--ifm-navbar-padding-horizontal:var(--ifm-spacing-horizontal);--ifm-navbar-padding-vertical:calc(var(--ifm-spacing-vertical)*0.5);--ifm-navbar-shadow:var(--ifm-global-shadow-lw);--ifm-navbar-search-input-background-color:var(--ifm-color-emphasis-200);--ifm-navbar-search-input-color:var(--ifm-color-emphasis-800);--ifm-navbar-search-input-placeholder-color:var(--ifm-color-emphasis-500);--ifm-navbar-search-input-icon:url('data:image/svg+xml;utf8,');--ifm-navbar-sidebar-width:83vw;--ifm-pagination-border-radius:var(--ifm-global-radius);--ifm-pagination-color-active:var(--ifm-color-primary);--ifm-pagination-font-size:1rem;--ifm-pagination-item-active-background:var(--ifm-hover-overlay);--ifm-pagination-page-spacing:0.2em;--ifm-pagination-padding-horizontal:calc(var(--ifm-spacing-horizontal)*1);--ifm-pagination-padding-vertical:calc(var(--ifm-spacing-vertical)*0.25);--ifm-pagination-nav-border-radius:var(--ifm-global-radius);--ifm-pagination-nav-color-hover:var(--ifm-color-primary);--ifm-pills-color-active:var(--ifm-color-primary);--ifm-pills-color-background-active:var(--ifm-hover-overlay);--ifm-pills-spacing:0.125rem;--ifm-tabs-color:v
ar(--ifm-font-color-secondary);--ifm-tabs-color-active:var(--ifm-color-primary);--ifm-tabs-color-active-border:var(--ifm-tabs-color-active);--ifm-tabs-padding-horizontal:1rem;--ifm-tabs-padding-vertical:1rem}:root,[data-theme=light]{--ifm-color-primary:#00283d;--ifm-background-color:#0000}.badge--danger,.badge--info,.badge--primary,.badge--secondary,.badge--success,.badge--warning{--ifm-badge-border-color:var(--ifm-badge-background-color)}.button--link,.button--outline{--ifm-button-background-color:#0000}*,.aa-Autocomplete *,.aa-DetachedFormContainer *,.aa-Panel *{box-sizing:border-box}html{-webkit-font-smoothing:antialiased;-webkit-text-size-adjust:100%;text-size-adjust:100%;background-color:var(--ifm-background-color);color:var(--ifm-font-color-base);color-scheme:var(--ifm-color-scheme);font:var(--ifm-font-size-base)/var(--ifm-line-height-base) var(--ifm-font-family-base);text-rendering:optimizelegibility}iframe{border:0;color-scheme:auto}.container{margin:0 auto;max-width:var(--ifm-container-width)}.container--fluid{max-width:inherit}.row{display:flex;flex-wrap:wrap;margin:0 calc(var(--ifm-spacing-horizontal)*-1)}.list_eTzJ article:last-child,.margin-bottom--none,.margin-vert--none,.markdown>:last-child{margin-bottom:0!important}.margin-top--none,.margin-vert--none,.tabItem_LNqP{margin-top:0!important}.row--no-gutters{margin-left:0;margin-right:0}.margin-horiz--none,.margin-right--none{margin-right:0!important}.row--no-gutters>.col{padding-left:0;padding-right:0}.row--align-top{align-items:flex-start}.row--align-bottom{align-items:flex-end}.menuExternalLink_NmtK,.row--align-center{align-items:center}.row--align-stretch{align-items:stretch}.row--align-baseline{align-items:baseline}.col{--ifm-col-width:100%;flex:1 
0;margin-left:0;max-width:var(--ifm-col-width)}.padding-bottom--none,.padding-vert--none{padding-bottom:0!important}.padding-top--none,.padding-vert--none{padding-top:0!important}.padding-horiz--none,.padding-left--none{padding-left:0!important}.padding-horiz--none,.padding-right--none{padding-right:0!important}.col[class*=col--]{flex:0 0 var(--ifm-col-width)}.col--1{--ifm-col-width:8.33333%}.col--offset-1{margin-left:8.33333%}.col--2{--ifm-col-width:16.66667%}.col--offset-2{margin-left:16.66667%}.col--3{--ifm-col-width:25%}.col--offset-3{margin-left:25%}.col--4{--ifm-col-width:33.33333%}.col--offset-4{margin-left:33.33333%}.col--5{--ifm-col-width:41.66667%}.col--offset-5{margin-left:41.66667%}.col--6{--ifm-col-width:50%}.col--offset-6{margin-left:50%}.col--7{--ifm-col-width:58.33333%}.col--offset-7{margin-left:58.33333%}.col--8{--ifm-col-width:66.66667%}.col--offset-8{margin-left:66.66667%}.col--9{--ifm-col-width:75%}.col--offset-9{margin-left:75%}.col--10{--ifm-col-width:83.33333%}.col--offset-10{margin-left:83.33333%}.col--11{--ifm-col-width:91.66667%}.col--offset-11{margin-left:91.66667%}.col--12{--ifm-col-width:100%}.col--offset-12{margin-left:100%}.margin-horiz--none,.margin-left--none{margin-left:0!important}.margin--none{margin:0!important}.margin-bottom--xs,.margin-vert--xs{margin-bottom:.25rem!important}.margin-top--xs,.margin-vert--xs{margin-top:.25rem!important}.margin-horiz--xs,.margin-left--xs{margin-left:.25rem!important}.margin-horiz--xs,.margin-right--xs{margin-right:.25rem!important}.margin--xs{margin:.25rem!important}.margin-bottom--sm,.margin-vert--sm{margin-bottom:.5rem!important}.margin-top--sm,.margin-vert--sm{margin-top:.5rem!important}.margin-horiz--sm,.margin-left--sm{margin-left:.5rem!important}.margin-horiz--sm,.margin-right--sm{margin-right:.5rem!important}.margin--sm{margin:.5rem!important}.margin-bottom--md,.margin-vert--md{margin-bottom:1rem!important}.margin-top--md,.margin-vert--md{margin-top:1rem!important}.margin-horiz--md,.margin
-left--md{margin-left:1rem!important}.margin-horiz--md,.margin-right--md{margin-right:1rem!important}.margin--md{margin:1rem!important}.margin-bottom--lg,.margin-vert--lg{margin-bottom:2rem!important}.margin-top--lg,.margin-vert--lg{margin-top:2rem!important}.margin-horiz--lg,.margin-left--lg{margin-left:2rem!important}.margin-horiz--lg,.margin-right--lg{margin-right:2rem!important}.margin--lg{margin:2rem!important}.margin-bottom--xl,.margin-vert--xl{margin-bottom:5rem!important}.margin-top--xl,.margin-vert--xl{margin-top:5rem!important}.margin-horiz--xl,.margin-left--xl{margin-left:5rem!important}.margin-horiz--xl,.margin-right--xl{margin-right:5rem!important}.margin--xl{margin:5rem!important}.padding--none{padding:0!important}.padding-bottom--xs,.padding-vert--xs{padding-bottom:.25rem!important}.padding-top--xs,.padding-vert--xs{padding-top:.25rem!important}.padding-horiz--xs,.padding-left--xs{padding-left:.25rem!important}.padding-horiz--xs,.padding-right--xs{padding-right:.25rem!important}.padding--xs{padding:.25rem!important}.padding-bottom--sm,.padding-vert--sm{padding-bottom:.5rem!important}.padding-top--sm,.padding-vert--sm{padding-top:.5rem!important}.padding-horiz--sm,.padding-left--sm{padding-left:.5rem!important}.padding-horiz--sm,.padding-right--sm{padding-right:.5rem!important}.padding--sm{padding:.5rem!important}.padding-bottom--md,.padding-vert--md{padding-bottom:1rem!important}.padding-top--md,.padding-vert--md{padding-top:1rem!important}.padding-horiz--md,.padding-left--md{padding-left:1rem!important}.padding-horiz--md,.padding-right--md{padding-right:1rem!important}.padding--md{padding:1rem!important}.padding-bottom--lg,.padding-vert--lg{padding-bottom:2rem!important}.padding-top--lg,.padding-vert--lg{padding-top:2rem!important}.padding-horiz--lg,.padding-left--lg{padding-left:2rem!important}.padding-horiz--lg,.padding-right--lg{padding-right:2rem!important}.padding--lg{padding:2rem!important}.padding-bottom--xl,.padding-vert--xl{padding-bottom:5r
em!important}.padding-top--xl,.padding-vert--xl{padding-top:5rem!important}.padding-horiz--xl,.padding-left--xl{padding-left:5rem!important}.padding-horiz--xl,.padding-right--xl{padding-right:5rem!important}.padding--xl{padding:5rem!important}code{background-color:var(--ifm-code-background);border:.1rem solid #0000001a;border-radius:var(--ifm-code-border-radius);font-family:var(--ifm-font-family-monospace);font-size:var(--ifm-code-font-size);padding:var(--ifm-code-padding-vertical) var(--ifm-code-padding-horizontal)}a code{color:inherit}pre{background-color:var(--ifm-pre-background);border-radius:var(--ifm-pre-border-radius);color:var(--ifm-pre-color);font:var(--ifm-code-font-size)/var(--ifm-pre-line-height) var(--ifm-font-family-monospace);padding:var(--ifm-pre-padding)}pre code{background-color:initial;border:none;font-size:100%;line-height:inherit;padding:0}kbd{background-color:var(--ifm-color-emphasis-0);border:1px solid var(--ifm-color-emphasis-400);border-radius:.2rem;box-shadow:inset 0 -1px 0 var(--ifm-color-emphasis-400);color:var(--ifm-color-emphasis-800);font:80% var(--ifm-font-family-monospace);padding:.15rem .3rem}h1,h2,h3,h4,h5,h6{color:var(--ifm-heading-color);font-family:var(--ifm-heading-font-family);font-weight:var(--ifm-heading-font-weight);line-height:var(--ifm-heading-line-height);margin:var(--ifm-heading-margin-top) 0 var(--ifm-heading-margin-bottom) 
0}h1{font-size:var(--ifm-h1-font-size)}h2{font-size:var(--ifm-h2-font-size)}h3{font-size:var(--ifm-h3-font-size)}h4{font-size:var(--ifm-h4-font-size)}h5{font-size:var(--ifm-h5-font-size)}h6{font-size:var(--ifm-h6-font-size)}img{max-width:100%}img[align=right]{padding-left:var(--image-alignment-padding)}img[align=left]{padding-right:var(--image-alignment-padding)}.markdown{--ifm-h1-vertical-rhythm-top:3;--ifm-h2-vertical-rhythm-top:2;--ifm-h3-vertical-rhythm-top:1.5;--ifm-heading-vertical-rhythm-top:1.25;--ifm-h1-vertical-rhythm-bottom:1.25;--ifm-heading-vertical-rhythm-bottom:1}.markdown:after,.markdown:before{content:"";display:table}.markdown:after{clear:both}.markdown h1:first-child{--ifm-h1-font-size:3rem;margin-bottom:calc(var(--ifm-h1-vertical-rhythm-bottom)*var(--ifm-leading))}.markdown>h2{--ifm-h2-font-size:2rem;margin-top:calc(var(--ifm-h2-vertical-rhythm-top)*var(--ifm-leading))}.markdown>h3{--ifm-h3-font-size:1.5rem;margin-top:calc(var(--ifm-h3-vertical-rhythm-top)*var(--ifm-leading))}.markdown>h4,.markdown>h5,.markdown>h6{margin-top:calc(var(--ifm-heading-vertical-rhythm-top)*var(--ifm-leading))}.markdown>p,.markdown>pre,.markdown>ul,.tabList__CuJ{margin-bottom:var(--ifm-leading)}.markdown li>p{margin-top:var(--ifm-list-paragraph-margin)}.markdown li+li{margin-top:var(--ifm-list-item-margin)}ol,ul{margin:0 0 var(--ifm-list-margin);padding-left:var(--ifm-list-left-padding)}ol ol,ul ol{list-style-type:lower-roman}ol ol ol,ol ul ol,ul ol ol,ul ul ol{list-style-type:lower-alpha}table{border-collapse:collapse;display:block;margin-bottom:var(--ifm-spacing-vertical)}table thead tr{border-bottom:2px solid var(--ifm-table-border-color)}table thead,table tr:nth-child(2n){background-color:var(--ifm-table-stripe-background)}table tr{background-color:var(--ifm-table-background);border-top:var(--ifm-table-border-width) solid var(--ifm-table-border-color)}table td,table th{border:var(--ifm-table-border-width) solid 
var(--ifm-table-border-color);padding:var(--ifm-table-cell-padding)}table th{background-color:var(--ifm-table-head-background);color:var(--ifm-table-head-color);font-weight:var(--ifm-table-head-font-weight)}table td{color:var(--ifm-table-cell-color)}strong{font-weight:var(--ifm-font-weight-bold)}a{color:var(--ifm-link-color);text-decoration:var(--ifm-link-decoration)}a:hover{color:var(--ifm-link-hover-color);text-decoration:var(--ifm-link-hover-decoration)}.button:hover,.text--no-decoration,.text--no-decoration:hover,a:not([href]){text-decoration:none}p{margin:0 0 var(--ifm-paragraph-margin-bottom)}blockquote{border-left:var(--ifm-blockquote-border-left-width) solid var(--ifm-blockquote-border-color);box-shadow:var(--ifm-blockquote-shadow);color:var(--ifm-blockquote-color);font-size:var(--ifm-blockquote-font-size);padding:var(--ifm-blockquote-padding-vertical) var(--ifm-blockquote-padding-horizontal)}blockquote>:first-child{margin-top:0}blockquote>:last-child{margin-bottom:0}hr{background-color:var(--ifm-hr-background-color);border:0;height:var(--ifm-hr-height);margin:var(--ifm-hr-margin-vertical) 0}.shadow--lw{box-shadow:var(--ifm-global-shadow-lw)!important}.shadow--md{box-shadow:var(--ifm-global-shadow-md)!important}.shadow--tl{box-shadow:var(--ifm-global-shadow-tl)!important}.text--primary,.wordWrapButtonEnabled_EoeP .wordWrapButtonIcon_Bwma{color:var(--ifm-color-primary)}.text--secondary{color:var(--ifm-color-secondary)}.text--success{color:var(--ifm-color-success)}.text--info{color:var(--ifm-color-info)}.text--warning{color:var(--ifm-color-warning)}.text--danger{color:var(--ifm-color-danger)}.features_f11O h2,.features_t9lD h1,.section__quotes_AssG h1,.text--center,.video_xvMC 
h1{text-align:center}.text--left{text-align:left}.text--justify{text-align:justify}.text--right{text-align:right}.text--capitalize{text-transform:capitalize}.text--lowercase{text-transform:lowercase}.admonitionHeading_tbUL,.alert__heading,.text--uppercase{text-transform:uppercase}.text--light{font-weight:var(--ifm-font-weight-light)}.text--normal{font-weight:var(--ifm-font-weight-normal)}.text--semibold{font-weight:var(--ifm-font-weight-semibold)}.text--bold{font-weight:var(--ifm-font-weight-bold)}.text--italic{font-style:italic}.text--truncate{overflow:hidden;text-overflow:ellipsis}.text--break{word-wrap:break-word!important;word-break:break-word!important}.clean-btn{background:none;border:none;color:inherit;cursor:pointer;font-family:inherit;padding:0}.alert,.alert .close{color:var(--ifm-alert-foreground-color)}.clean-list{padding-left:0}.alert--primary{--ifm-alert-background-color:var(--ifm-color-primary-contrast-background);--ifm-alert-background-color-highlight:#3578e526;--ifm-alert-foreground-color:var(--ifm-color-primary-contrast-foreground);--ifm-alert-border-color:var(--ifm-color-primary-dark)}.alert--secondary{--ifm-alert-background-color:var(--ifm-color-secondary-contrast-background);--ifm-alert-background-color-highlight:#ebedf026;--ifm-alert-foreground-color:var(--ifm-color-secondary-contrast-foreground);--ifm-alert-border-color:var(--ifm-color-secondary-dark)}.alert--success{--ifm-alert-background-color:var(--ifm-color-success-contrast-background);--ifm-alert-background-color-highlight:#00a40026;--ifm-alert-foreground-color:var(--ifm-color-success-contrast-foreground);--ifm-alert-border-color:var(--ifm-color-success-dark)}.alert--info{--ifm-alert-background-color:var(--ifm-color-info-contrast-background);--ifm-alert-background-color-highlight:#54c7ec26;--ifm-alert-foreground-color:var(--ifm-color-info-contrast-foreground);--ifm-alert-border-color:var(--ifm-color-info-dark)}.alert--warning{--ifm-alert-background-color:var(--ifm-color-warning-contrast-ba
ckground);--ifm-alert-background-color-highlight:#ffba0026;--ifm-alert-foreground-color:var(--ifm-color-warning-contrast-foreground);--ifm-alert-border-color:var(--ifm-color-warning-dark)}.alert--danger{--ifm-alert-background-color:var(--ifm-color-danger-contrast-background);--ifm-alert-background-color-highlight:#fa383e26;--ifm-alert-foreground-color:var(--ifm-color-danger-contrast-foreground);--ifm-alert-border-color:var(--ifm-color-danger-dark)}.alert{--ifm-code-background:var(--ifm-alert-background-color-highlight);--ifm-link-color:var(--ifm-alert-foreground-color);--ifm-link-hover-color:var(--ifm-alert-foreground-color);--ifm-link-decoration:underline;--ifm-tabs-color:var(--ifm-alert-foreground-color);--ifm-tabs-color-active:var(--ifm-alert-foreground-color);--ifm-tabs-color-active-border:var(--ifm-alert-border-color);background-color:var(--ifm-alert-background-color);border:var(--ifm-alert-border-width) solid var(--ifm-alert-border-color);border-left-width:var(--ifm-alert-border-left-width);border-radius:var(--ifm-alert-border-radius);box-shadow:var(--ifm-alert-shadow);padding:var(--ifm-alert-padding-vertical) var(--ifm-alert-padding-horizontal)}.alert__heading{align-items:center;display:flex;font:700 var(--ifm-h5-font-size)/var(--ifm-heading-line-height) var(--ifm-heading-font-family);margin-bottom:.5rem}.alert__icon{display:inline-flex;margin-right:.4em}.alert__icon svg{fill:var(--ifm-alert-foreground-color);stroke:var(--ifm-alert-foreground-color);stroke-width:0}.alert .close{margin:calc(var(--ifm-alert-padding-vertical)*-1) calc(var(--ifm-alert-padding-horizontal)*-1) 0 0;opacity:.75}.alert .close:focus,.alert .close:hover{opacity:1}.alert a{text-decoration-color:var(--ifm-alert-border-color)}.alert 
a:hover{text-decoration-thickness:2px}.avatar{column-gap:var(--ifm-avatar-intro-margin);display:flex}.avatar__photo{border-radius:50%;display:block;height:var(--ifm-avatar-photo-size);overflow:hidden;width:var(--ifm-avatar-photo-size)}.card--full-height,.navbar__logo img,body,html{height:100%}.avatar__photo--sm{--ifm-avatar-photo-size:2rem}.avatar__photo--lg{--ifm-avatar-photo-size:4rem}.avatar__photo--xl{--ifm-avatar-photo-size:6rem}.avatar__intro{display:flex;flex:1 1;flex-direction:column;justify-content:center;text-align:var(--ifm-avatar-intro-alignment)}.badge,.breadcrumbs__item,.breadcrumbs__link,.button,.dropdown>.navbar__link:after{display:inline-block}.avatar__name{font:700 var(--ifm-h4-font-size)/var(--ifm-heading-line-height) var(--ifm-font-family-base)}.avatar__subtitle{margin-top:.25rem}.avatar--vertical{--ifm-avatar-intro-alignment:center;--ifm-avatar-intro-margin:0.5rem;align-items:center;flex-direction:column}.badge{background-color:var(--ifm-badge-background-color);border:var(--ifm-badge-border-width) solid var(--ifm-badge-border-color);border-radius:var(--ifm-badge-border-radius);color:var(--ifm-badge-color);font-size:75%;font-weight:var(--ifm-font-weight-bold);line-height:1;padding:var(--ifm-badge-padding-vertical) var(--ifm-badge-padding-horizontal)}.badge--primary{--ifm-badge-background-color:var(--ifm-color-primary)}.badge--secondary{--ifm-badge-background-color:var(--ifm-color-secondary);color:var(--ifm-color-black)}.breadcrumbs__link,.button.button--secondary.button--outline:not(.button--active):not(:hover){color:var(--ifm-font-color-base)}.badge--success{--ifm-badge-background-color:var(--ifm-color-success)}.badge--info{--ifm-badge-background-color:var(--ifm-color-info)}.badge--warning{--ifm-badge-background-color:var(--ifm-color-warning)}.badge--danger{--ifm-badge-background-color:var(--ifm-color-danger)}.breadcrumbs{margin-bottom:0;padding-left:0}.breadcrumbs__item:not(:last-child):after{background:var(--ifm-breadcrumb-separator) 
center;content:" ";display:inline-block;filter:var(--ifm-breadcrumb-separator-filter);height:calc(var(--ifm-breadcrumb-separator-size)*var(--ifm-breadcrumb-size-multiplier)*var(--ifm-breadcrumb-separator-size-multiplier));margin:0 var(--ifm-breadcrumb-spacing);opacity:.5;width:calc(var(--ifm-breadcrumb-separator-size)*var(--ifm-breadcrumb-size-multiplier)*var(--ifm-breadcrumb-separator-size-multiplier))}.breadcrumbs__item--active .breadcrumbs__link{background:var(--ifm-breadcrumb-item-background-active);color:var(--ifm-breadcrumb-color-active)}.breadcrumbs__link{border-radius:var(--ifm-breadcrumb-border-radius);font-size:calc(1rem*var(--ifm-breadcrumb-size-multiplier));padding:calc(var(--ifm-breadcrumb-padding-vertical)*var(--ifm-breadcrumb-size-multiplier)) calc(var(--ifm-breadcrumb-padding-horizontal)*var(--ifm-breadcrumb-size-multiplier));transition-duration:var(--ifm-transition-fast);transition-property:background,color}.breadcrumbs__link:any-link:hover,.breadcrumbs__link:link:hover,.breadcrumbs__link:visited:hover,area[href].breadcrumbs__link:hover{background:var(--ifm-breadcrumb-item-background-active);text-decoration:none}.breadcrumbs--sm{--ifm-breadcrumb-size-multiplier:0.8}.breadcrumbs--lg{--ifm-breadcrumb-size-multiplier:1.2}.button{background-color:var(--ifm-button-background-color);border:var(--ifm-button-border-width) solid var(--ifm-button-border-color);border-radius:var(--ifm-button-border-radius);cursor:pointer;font-size:calc(.875rem*var(--ifm-button-size-multiplier));font-weight:var(--ifm-button-font-weight);line-height:1.5;padding:calc(var(--ifm-button-padding-vertical)*var(--ifm-button-size-multiplier)) 
calc(var(--ifm-button-padding-horizontal)*var(--ifm-button-size-multiplier));text-align:center;transition-duration:var(--ifm-button-transition-duration);transition-property:color,background,border-color;user-select:none}.button,.button:hover{color:var(--ifm-button-color)}.button--outline{--ifm-button-color:var(--ifm-button-border-color)}.button--outline:hover{--ifm-button-background-color:var(--ifm-button-border-color)}.button--link{--ifm-button-border-color:#0000;color:var(--ifm-link-color);text-decoration:var(--ifm-link-decoration)}.button--link.button--active,.button--link:active,.button--link:hover{color:var(--ifm-link-hover-color);text-decoration:var(--ifm-link-hover-decoration)}.button.disabled,.button:disabled,.button[disabled]{opacity:.65;pointer-events:none}.button--sm{--ifm-button-size-multiplier:0.8}.button--lg{--ifm-button-size-multiplier:1.35}.button--block{display:block;width:100%}.button.button--secondary{color:var(--ifm-color-gray-900)}:where(.button--primary){--ifm-button-background-color:var(--ifm-color-primary);--ifm-button-border-color:var(--ifm-color-primary)}:where(.button--primary):not(.button--outline):hover{--ifm-button-background-color:var(--ifm-color-primary-dark);--ifm-button-border-color:var(--ifm-color-primary-dark)}.button--primary.button--active,.button--primary:active{--ifm-button-background-color:var(--ifm-color-primary-darker);--ifm-button-border-color:var(--ifm-color-primary-darker)}:where(.button--secondary){--ifm-button-background-color:var(--ifm-color-secondary);--ifm-button-border-color:var(--ifm-color-secondary)}:where(.button--secondary):not(.button--outline):hover{--ifm-button-background-color:var(--ifm-color-secondary-dark);--ifm-button-border-color:var(--ifm-color-secondary-dark)}.button--secondary.button--active,.button--secondary:active{--ifm-button-background-color:var(--ifm-color-secondary-darker);--ifm-button-border-color:var(--ifm-color-secondary-darker)}:where(.button--success){--ifm-button-background-color:var(--i
fm-color-success);--ifm-button-border-color:var(--ifm-color-success)}:where(.button--success):not(.button--outline):hover{--ifm-button-background-color:var(--ifm-color-success-dark);--ifm-button-border-color:var(--ifm-color-success-dark)}.button--success.button--active,.button--success:active{--ifm-button-background-color:var(--ifm-color-success-darker);--ifm-button-border-color:var(--ifm-color-success-darker)}:where(.button--info){--ifm-button-background-color:var(--ifm-color-info);--ifm-button-border-color:var(--ifm-color-info)}:where(.button--info):not(.button--outline):hover{--ifm-button-background-color:var(--ifm-color-info-dark);--ifm-button-border-color:var(--ifm-color-info-dark)}.button--info.button--active,.button--info:active{--ifm-button-background-color:var(--ifm-color-info-darker);--ifm-button-border-color:var(--ifm-color-info-darker)}:where(.button--warning){--ifm-button-background-color:var(--ifm-color-warning);--ifm-button-border-color:var(--ifm-color-warning)}:where(.button--warning):not(.button--outline):hover{--ifm-button-background-color:var(--ifm-color-warning-dark);--ifm-button-border-color:var(--ifm-color-warning-dark)}.button--warning.button--active,.button--warning:active{--ifm-button-background-color:var(--ifm-color-warning-darker);--ifm-button-border-color:var(--ifm-color-warning-darker)}:where(.button--danger){--ifm-button-background-color:var(--ifm-color-danger);--ifm-button-border-color:var(--ifm-color-danger)}:where(.button--danger):not(.button--outline):hover{--ifm-button-background-color:var(--ifm-color-danger-dark);--ifm-button-border-color:var(--ifm-color-danger-dark)}.button--danger.button--active,.button--danger:active{--ifm-button-background-color:var(--ifm-color-danger-darker);--ifm-button-border-color:var(--ifm-color-danger-darker)}.button-group{display:inline-flex;gap:var(--ifm-button-group-spacing)}.button-group>.button:not(:first-child){border-bottom-left-radius:0;border-top-left-radius:0}.button-group>.button:not(:last-chi
ld){border-bottom-right-radius:0;border-top-right-radius:0}.button-group--block{display:flex;justify-content:stretch}.button-group--block>.button{flex-grow:1}.card{background-color:var(--ifm-card-background-color);border-radius:var(--ifm-card-border-radius);box-shadow:var(--ifm-global-shadow-lw);display:flex;flex-direction:column;overflow:hidden}.card__image{padding-top:var(--ifm-card-vertical-spacing)}.card__image:first-child{padding-top:0}.card__body,.card__footer,.card__header{padding:var(--ifm-card-vertical-spacing) var(--ifm-card-horizontal-spacing)}.card__body:not(:last-child),.card__footer:not(:last-child),.card__header:not(:last-child){padding-bottom:0}.card__body>:last-child,.card__footer>:last-child,.card__header>:last-child{margin-bottom:0}.card__footer{margin-top:auto}.table-of-contents{font-size:.8rem;margin-bottom:0;padding:var(--ifm-toc-padding-vertical) 0}.table-of-contents,.table-of-contents ul{list-style:none;padding-left:var(--ifm-toc-padding-horizontal)}.table-of-contents li{margin:var(--ifm-toc-padding-vertical) var(--ifm-toc-padding-horizontal)}.table-of-contents__left-border{border-left:1px solid var(--ifm-toc-border-color)}.table-of-contents__link{color:var(--ifm-toc-link-color);display:block}.table-of-contents__link--active,.table-of-contents__link--active code,.table-of-contents__link:hover,.table-of-contents__link:hover code{color:var(--ifm-color-primary);text-decoration:none}.close{color:var(--ifm-color-black);float:right;font-size:1.5rem;font-weight:var(--ifm-font-weight-bold);line-height:1;opacity:.5;padding:1rem;transition:opacity var(--ifm-transition-fast) var(--ifm-transition-timing-default)}.close:hover{opacity:.7}.close:focus,.theme-code-block-highlighted-line .codeLineNumber_Tfdd:before{opacity:.8}.dropdown{display:inline-flex;font-weight:var(--ifm-dropdown-font-weight);position:relative;vertical-align:top}.dropdown--hoverable:hover .dropdown__menu,.dropdown--show 
.dropdown__menu{opacity:1;pointer-events:all;transform:translateY(-1px);visibility:visible}#nprogress,.dropdown__menu,.navbar__item.dropdown .navbar__link:not([href]){pointer-events:none}.dropdown--right .dropdown__menu{left:inherit;right:0}.dropdown--nocaret .navbar__link:after{content:none!important}.dropdown__menu{background-color:var(--ifm-dropdown-background-color);border-radius:var(--ifm-global-radius);box-shadow:var(--ifm-global-shadow-md);left:0;max-height:80vh;min-width:10rem;opacity:0;overflow-y:auto;padding:.5rem;position:absolute;top:calc(100% - var(--ifm-navbar-item-padding-vertical) + .3rem);transform:translateY(-.625rem);transition-duration:var(--ifm-transition-fast);transition-property:opacity,transform,visibility;transition-timing-function:var(--ifm-transition-timing-default);visibility:hidden;z-index:var(--ifm-z-index-dropdown)}.menu__caret,.menu__link,.menu__list-item-collapsible{border-radius:.25rem;transition:background var(--ifm-transition-fast) var(--ifm-transition-timing-default)}.dropdown__link{border-radius:.25rem;color:var(--ifm-dropdown-link-color);display:block;font-size:.875rem;margin-top:.2rem;padding:.25rem .5rem}.dropdown__link--active,.dropdown__link:hover{background-color:var(--ifm-dropdown-hover-background-color);color:var(--ifm-dropdown-link-color);text-decoration:none}.dropdown__link--active,.dropdown__link--active:hover{--ifm-dropdown-link-color:var(--ifm-link-color)}.dropdown>.navbar__link:after{border-color:currentcolor #0000;border-style:solid;border-width:.4em .4em 0;content:"";margin-left:.3em;position:relative;top:2px;transform:translateY(-50%)}.footer{background-color:var(--ifm-footer-background-color);color:var(--ifm-footer-color);padding:var(--ifm-footer-padding-vertical) 
var(--ifm-footer-padding-horizontal)}.footer--dark{--ifm-footer-background-color:#303846;--ifm-footer-color:var(--ifm-footer-link-color);--ifm-footer-link-color:var(--ifm-color-secondary);--ifm-footer-title-color:var(--ifm-color-white)}.footer__links{margin-bottom:1rem}.footer__link-item{color:var(--ifm-footer-link-color);line-height:2}.footer__link-item:hover{color:var(--ifm-footer-link-hover-color)}.footer__link-separator{margin:0 var(--ifm-footer-link-horizontal-spacing)}.footer__logo{margin-top:1rem;max-width:var(--ifm-footer-logo-max-width)}.footer__title{color:var(--ifm-footer-title-color);font:700 var(--ifm-h4-font-size)/var(--ifm-heading-line-height) var(--ifm-font-family-base);margin-bottom:var(--ifm-heading-margin-bottom)}.menu,.navbar__link{font-weight:var(--ifm-font-weight-semibold)}.docItemContainer_Djhp article>:first-child,.docItemContainer_Djhp header+*,.footer__item{margin-top:0}.admonitionContent_S0QG>:last-child,.cardContainer_fWXF :last-child,.collapsibleContent_i85q>:last-child,.footer__items,.tabItem_Ymn6>:last-child{margin-bottom:0}.codeBlockStandalone_MEMb,[type=checkbox]{padding:0}.hero{align-items:center;background-color:var(--ifm-hero-background-color);color:var(--ifm-hero-text-color);display:flex;padding:4rem 2rem}.hero--primary{--ifm-hero-background-color:var(--ifm-color-primary);--ifm-hero-text-color:var(--ifm-font-color-base-inverse)}.hero--dark{--ifm-hero-background-color:#303846;--ifm-hero-text-color:var(--ifm-color-white)}.hero__title,.title_f1Hy{font-size:3rem}.hero__subtitle{font-size:1.5rem}.menu__list{margin:0;padding-left:0}.menu__caret,.menu__link{padding:var(--ifm-menu-link-padding-vertical) var(--ifm-menu-link-padding-horizontal)}.menu__list .menu__list{flex:0 0 100%;margin-top:.25rem;padding-left:var(--ifm-menu-link-padding-horizontal)}.menu__list-item:not(:first-child){margin-top:.25rem}.menu__list-item--collapsed 
.menu__list{height:0;overflow:hidden}.details_lb9f[data-collapsed=false].isBrowser_bmU9>summary:before,.details_lb9f[open]:not(.isBrowser_bmU9)>summary:before,.menu__list-item--collapsed .menu__caret:before,.menu__list-item--collapsed .menu__link--sublist:after{transform:rotate(90deg)}.menu__list-item-collapsible{display:flex;flex-wrap:wrap;position:relative}.menu__caret:hover,.menu__link:hover,.menu__list-item-collapsible--active,.menu__list-item-collapsible:hover{background:var(--ifm-menu-color-background-hover)}.menu__list-item-collapsible .menu__link--active,.menu__list-item-collapsible .menu__link:hover{background:none!important}.menu__caret,.menu__link{align-items:center;display:flex}.navbar-sidebar,.navbar-sidebar__backdrop{opacity:0;transition-duration:var(--ifm-transition-fast);transition-timing-function:ease-in-out;left:0;top:0;bottom:0;visibility:hidden}.menu__link{color:var(--ifm-menu-color);flex:1;line-height:1.25}.menu__link:hover{color:var(--ifm-menu-color);text-decoration:none}.menu__caret:before,.menu__link--sublist-caret:after{filter:var(--ifm-menu-link-sublist-icon-filter);height:1.25rem;transform:rotate(180deg);transition:transform var(--ifm-transition-fast) linear;width:1.25rem;content:""}.menu__link--sublist-caret:after{background:var(--ifm-menu-link-sublist-icon) 50%/2rem 2rem;margin-left:auto;min-width:1.25rem}.menu__link--active,.menu__link--active:hover{color:var(--ifm-menu-color-active)}.navbar__brand,.navbar__link{color:var(--ifm-navbar-link-color)}.menu__link--active:not(.menu__link--sublist){background-color:var(--ifm-menu-color-background-active)}.menu__caret:before{background:var(--ifm-menu-link-sublist-icon) 50%/2rem 2rem}.navbar--dark,html[data-theme=dark]{--ifm-menu-link-sublist-icon-filter:invert(100%) sepia(94%) saturate(17%) hue-rotate(223deg) brightness(104%) 
contrast(98%)}.navbar{background-color:var(--ifm-navbar-background-color);box-shadow:var(--ifm-navbar-shadow);height:var(--ifm-navbar-height);padding:var(--ifm-navbar-padding-vertical) var(--ifm-navbar-padding-horizontal)}.navbar,.navbar>.container,.navbar>.container-fluid{display:flex}.navbar--fixed-top{position:sticky;top:0;z-index:var(--ifm-z-index-fixed)}.navbar__inner{display:flex;flex-wrap:wrap;justify-content:space-between;width:100%}.navbar__brand{align-items:center;display:flex;margin-right:1rem;min-width:0}.navbar__brand:hover{color:var(--ifm-navbar-link-hover-color);text-decoration:none}.announcementBarContent_xLdY,.navbar__title{flex:1 1 auto}.navbar__toggle{display:none;margin-right:.5rem}.navbar__logo{flex:0 0 auto;height:2rem;margin-right:.5rem}.aa-DetachedSearchButton kbd,.aa-FooterCommands kbd{background:var(--key-gradient);box-shadow:var(--key-shadow);height:18px;width:20px}.navbar__items{align-items:center;display:flex;flex:1;min-width:0}.navbar__items--center{flex:0 0 auto}.navbar__items--center .navbar__brand{margin:0}.navbar__items--center+.navbar__items--right{flex:1}.navbar__items--right{flex:0 0 auto;justify-content:flex-end}.navbar__items--right>:last-child{padding-right:0}.navbar__item{display:inline-block;padding:var(--ifm-navbar-item-padding-vertical) 
var(--ifm-navbar-item-padding-horizontal)}.navbar__link--active,.navbar__link:hover{color:var(--ifm-navbar-link-hover-color);text-decoration:none}.navbar--dark,.navbar--primary{--ifm-menu-color:var(--ifm-color-gray-300);--ifm-navbar-link-color:var(--ifm-color-gray-100);--ifm-navbar-search-input-background-color:#ffffff1a;--ifm-navbar-search-input-placeholder-color:#ffffff80;color:var(--ifm-color-white)}.navbar--dark{--ifm-navbar-background-color:#242526;--ifm-menu-color-background-active:#ffffff0d;--ifm-navbar-search-input-color:var(--ifm-color-white)}.navbar--primary{--ifm-navbar-background-color:var(--ifm-color-primary);--ifm-navbar-link-hover-color:var(--ifm-color-white);--ifm-menu-color-active:var(--ifm-color-white);--ifm-navbar-search-input-color:var(--ifm-color-emphasis-500)}.navbar__search-input{-webkit-appearance:none;appearance:none;background:var(--ifm-navbar-search-input-background-color) var(--ifm-navbar-search-input-icon) no-repeat .75rem center/1rem 1rem;border:none;border-radius:2rem;color:var(--ifm-navbar-search-input-color);cursor:text;display:inline-block;font-size:.9rem;height:2rem;padding:0 .5rem 0 2.25rem;width:12.5rem}.navbar__search-input::placeholder{color:var(--ifm-navbar-search-input-placeholder-color)}.navbar-sidebar{background-color:var(--ifm-navbar-background-color);box-shadow:var(--ifm-global-shadow-md);position:fixed;transform:translate3d(-100%,0,0);transition-property:opacity,visibility,transform;width:var(--ifm-navbar-sidebar-width)}.navbar-sidebar--show .navbar-sidebar,.navbar-sidebar__items{transform:translateZ(0)}.navbar-sidebar--show .navbar-sidebar,.navbar-sidebar--show .navbar-sidebar__backdrop{opacity:1;visibility:visible}.navbar-sidebar__backdrop{background-color:#0009;position:fixed;right:0;transition-property:opacity,visibility}.navbar-sidebar__brand{align-items:center;box-shadow:var(--ifm-navbar-shadow);display:flex;flex:1;height:var(--ifm-navbar-height);padding:var(--ifm-navbar-padding-vertical) 
var(--ifm-navbar-padding-horizontal)}.navbar-sidebar__items{display:flex;height:calc(100% - var(--ifm-navbar-height));transition:transform var(--ifm-transition-fast) ease-in-out}.navbar-sidebar__items--show-secondary{transform:translate3d(calc((var(--ifm-navbar-sidebar-width))*-1),0,0)}.navbar-sidebar__item{flex-shrink:0;padding:.5rem;width:calc(var(--ifm-navbar-sidebar-width))}.navbar-sidebar__back{background:var(--ifm-menu-color-background-active);font-size:15px;font-weight:var(--ifm-button-font-weight);margin:0 0 .2rem -.5rem;padding:.6rem 1.5rem;position:relative;text-align:left;top:-.5rem;width:calc(100% + 1rem)}.navbar-sidebar__close{display:flex;margin-left:auto}.pagination{column-gap:var(--ifm-pagination-page-spacing);display:flex;font-size:var(--ifm-pagination-font-size);padding-left:0}.pagination--sm{--ifm-pagination-font-size:0.8rem;--ifm-pagination-padding-horizontal:0.8rem;--ifm-pagination-padding-vertical:0.2rem}.pagination--lg{--ifm-pagination-font-size:1.2rem;--ifm-pagination-padding-horizontal:1.2rem;--ifm-pagination-padding-vertical:0.3rem}.pagination__item{display:inline-flex}.pagination__item>span{padding:var(--ifm-pagination-padding-vertical)}.pagination__item--active .pagination__link{color:var(--ifm-pagination-color-active)}.pagination__item--active .pagination__link,.pagination__item:not(.pagination__item--active):hover .pagination__link{background:var(--ifm-pagination-item-active-background)}.pagination__item--disabled,.pagination__item[disabled]{opacity:.25;pointer-events:none}.pagination__link{border-radius:var(--ifm-pagination-border-radius);color:var(--ifm-font-color-base);display:inline-block;padding:var(--ifm-pagination-padding-vertical) var(--ifm-pagination-padding-horizontal);transition:background var(--ifm-transition-fast) 
var(--ifm-transition-timing-default)}.pagination__link:hover,.sidebarItemLink_mo7H:hover{text-decoration:none}.pagination-nav{grid-gap:var(--ifm-spacing-horizontal);display:grid;gap:var(--ifm-spacing-horizontal);grid-template-columns:repeat(2,1fr)}.pagination-nav__link{border:1px solid var(--ifm-color-emphasis-300);border-radius:var(--ifm-pagination-nav-border-radius);display:block;height:100%;line-height:var(--ifm-heading-line-height);padding:var(--ifm-global-spacing);transition:border-color var(--ifm-transition-fast) var(--ifm-transition-timing-default)}.pagination-nav__link:hover{border-color:var(--ifm-pagination-nav-color-hover);text-decoration:none}.pagination-nav__link--next{grid-column:2/3;text-align:right}.pagination-nav__label{font-size:var(--ifm-h4-font-size);font-weight:var(--ifm-heading-font-weight);word-break:break-word}.pagination-nav__link--prev .pagination-nav__label:before{content:"« "}.pagination-nav__link--next .pagination-nav__label:after{content:" »"}.pagination-nav__sublabel{color:var(--ifm-color-content-secondary);font-size:var(--ifm-h5-font-size);font-weight:var(--ifm-font-weight-semibold);margin-bottom:.25rem}.pills__item,.tabs{font-weight:var(--ifm-font-weight-bold)}.pills{display:flex;gap:var(--ifm-pills-spacing);padding-left:0}.pills__item{border-radius:.5rem;cursor:pointer;display:inline-block;padding:.25rem 1rem;transition:background var(--ifm-transition-fast) var(--ifm-transition-timing-default)}.tabs,:not(.containsTaskList_mC6p>li)>.containsTaskList_mC6p{padding-left:0}.pills__item--active{color:var(--ifm-pills-color-active)}.pills__item--active,.pills__item:not(.pills__item--active):hover{background:var(--ifm-pills-color-background-active)}.pills--block{justify-content:stretch}.pills--block .pills__item{flex-grow:1;text-align:center}.tabs{color:var(--ifm-tabs-color);display:flex;margin-bottom:0;overflow-x:auto}.tabs__item{border-bottom:3px solid 
#0000;border-radius:var(--ifm-global-radius);cursor:pointer;display:inline-flex;padding:var(--ifm-tabs-padding-vertical) var(--ifm-tabs-padding-horizontal);transition:background-color var(--ifm-transition-fast) var(--ifm-transition-timing-default)}.tabs__item--active{border-bottom-color:var(--ifm-tabs-color-active-border);border-bottom-left-radius:0;border-bottom-right-radius:0;color:var(--ifm-tabs-color-active)}.tabs__item:hover{background-color:var(--ifm-hover-overlay)}.aa-DetachedSearchButton,.toggleButton_gllP:hover{background:var(--ifm-color-emphasis-200)}.tabs--block{justify-content:stretch}.tabs--block .tabs__item{flex-grow:1;justify-content:center}html[data-theme=dark]{--ifm-color-scheme:dark;--ifm-color-emphasis-0:var(--ifm-color-gray-1000);--ifm-color-emphasis-100:var(--ifm-color-gray-900);--ifm-color-emphasis-200:var(--ifm-color-gray-800);--ifm-color-emphasis-300:var(--ifm-color-gray-700);--ifm-color-emphasis-400:var(--ifm-color-gray-600);--ifm-color-emphasis-600:var(--ifm-color-gray-400);--ifm-color-emphasis-700:var(--ifm-color-gray-300);--ifm-color-emphasis-800:var(--ifm-color-gray-200);--ifm-color-emphasis-900:var(--ifm-color-gray-100);--ifm-color-emphasis-1000:var(--ifm-color-gray-0);--ifm-background-color:#1b1b1d;--ifm-background-surface-color:#242526;--ifm-hover-overlay:#ffffff0d;--ifm-color-content:#e3e3e3;--ifm-color-content-secondary:#fff;--ifm-breadcrumb-separator-filter:invert(64%) sepia(11%) saturate(0%) hue-rotate(149deg) brightness(99%) 
contrast(95%);--ifm-code-background:#ffffff1a;--ifm-scrollbar-track-background-color:#444;--ifm-scrollbar-thumb-background-color:#686868;--ifm-scrollbar-thumb-hover-background-color:#7a7a7a;--ifm-table-stripe-background:#ffffff12;--ifm-toc-border-color:var(--ifm-color-emphasis-200);--ifm-color-primary-contrast-background:#102445;--ifm-color-primary-contrast-foreground:#ebf2fc;--ifm-color-secondary-contrast-background:#474748;--ifm-color-secondary-contrast-foreground:#fdfdfe;--ifm-color-success-contrast-background:#003100;--ifm-color-success-contrast-foreground:#e6f6e6;--ifm-color-info-contrast-background:#193c47;--ifm-color-info-contrast-foreground:#eef9fd;--ifm-color-warning-contrast-background:#4d3800;--ifm-color-warning-contrast-foreground:#fff8e6;--ifm-color-danger-contrast-background:#4b1113;--ifm-color-danger-contrast-foreground:#ffebec}:root{--docusaurus-progress-bar-color:var(--ifm-color-primary);--ifm-color-primary-dark:#002437;--ifm-color-primary-darker:#002234;--ifm-color-primary-darkest:#001c2b;--ifm-color-primary-light:#002c43;--ifm-color-primary-lighter:#002e46;--ifm-color-primary-lightest:#00344f;--ifm-code-font-size:95%;--ifm-link-color:#00ab6b;--docusaurus-highlighted-code-line-bg:#0000001a;--aa-primary-color-rgb:var(--aa-text-color-rgb);--aa-footer-height:3.5rem;--docusaurus-announcement-bar-height:auto;--aa-search-input-height:44px;--aa-input-icon-size:20px;--aa-base-unit:16;--aa-spacing-factor:1;--aa-spacing:calc(var(--aa-base-unit)*var(--aa-spacing-factor)*1px);--aa-spacing-half:calc(var(--aa-spacing)/2);--aa-panel-max-height:650px;--aa-base-z-index:9999;--aa-font-size:calc(var(--aa-base-unit)*1px);--aa-font-family:inherit;--aa-font-weight-medium:500;--aa-font-weight-semibold:600;--aa-font-weight-bold:700;--aa-icon-size:20px;--aa-icon-stroke-width:1.6;--aa-icon-color-alpha:1;--aa-action-icon-size:20px;--aa-text-color-rgb:38,38,39;--aa-text-color-alpha:1;--aa-primary-color-rgb:62,52,211;--aa-primary-color-alpha:0.2;--aa-muted-color-rgb:128,126,16
3;--aa-muted-color-alpha:0.6;--aa-panel-border-color-rgb:128,126,163;--aa-panel-border-color-alpha:0.3;--aa-input-border-color-rgb:128,126,163;--aa-input-border-color-alpha:0.8;--aa-background-color-rgb:255,255,255;--aa-background-color-alpha:1;--aa-input-background-color-rgb:255,255,255;--aa-input-background-color-alpha:1;--aa-selected-color-rgb:179,173,214;--aa-selected-color-alpha:0.205;--aa-description-highlight-background-color-rgb:245,223,77;--aa-description-highlight-background-color-alpha:0.5;--aa-detached-media-query:(max-width:680px);--aa-detached-modal-media-query:(min-width:680px);--aa-detached-modal-max-width:680px;--aa-detached-modal-max-height:500px;--aa-overlay-color-rgb:115,114,129;--aa-overlay-color-alpha:0.4;--aa-panel-shadow:0 0 0 1px #23263b1a,0 6px 16px -4px #23263b26;--aa-scrollbar-width:13px;--aa-scrollbar-track-background-color-rgb:234,234,234;--aa-scrollbar-track-background-color-alpha:1;--aa-scrollbar-thumb-background-color-alpha:1;--docusaurus-collapse-button-bg:#0000;--docusaurus-collapse-button-bg-hover:#0000001a;--doc-sidebar-width:300px;--doc-sidebar-hidden-width:30px;--docusaurus-tag-list-border:var(--ifm-color-emphasis-300)}#nprogress .bar{background:var(--docusaurus-progress-bar-color);height:2px;left:0;position:fixed;top:0;width:100%;z-index:1031}#nprogress .peg{box-shadow:0 0 10px var(--docusaurus-progress-bar-color),0 0 5px var(--docusaurus-progress-bar-color);height:100%;opacity:1;position:absolute;right:0;transform:rotate(3deg) translateY(-4px);width:100px}[data-theme=dark]{--ifm-color-primary:#21f190;--ifm-color-primary-dark:#0fe883;--ifm-color-primary-darker:#0edb7b;--ifm-color-primary-darkest:#0bb466;--ifm-color-primary-light:#3bf39d;--ifm-color-primary-lighter:#48f3a3;--ifm-color-primary-lightest:#6ef6b7;--ifm-background-color:#00283d!important;--ifm-background-surface-color:#00283d!important}[data-theme=dark] #__docusaurus>footer,[data-theme=light] 
#__docusaurus>footer{background-color:#00283d}[data-theme=light]{--ifm-color-primary-dark:#00ab6b;--ifm-color-primary-darker:#00947f;--ifm-color-primary-darkest:#007b82;--ifm-color-primary-light:#29d5b0;--ifm-color-primary-lighter:#32d8b4;--ifm-color-primary-lightest:#4fddbf}[data-theme=light] .hero__title{color:#21f190}main a{font-weight:700}.aa-LoadingIndicator,.buttons_AeoN,.youtube-container{align-items:center;display:flex;justify-content:center}.container__quotes .card,.quote_UGhH .card_V6JQ{height:260px}.container__quotes .card__header{height:120px}.container__quotes .card__header .avatar__subtitle{color:grey}.aspect-ratio-16-9{aspect-ratio:16/9;height:auto;width:100%}.aa-DetachedSearchButton{border:none;color:initial;flex-direction:row;gap:.5rem}.aa-DetachedSearchButton kbd{align-items:center;border:0;border-radius:2px;display:flex;justify-content:center;padding:0 0 1px}.aa-DetachedSearchButton kbd:last-child{margin-right:.5em}.aa-DetachedSearchButton .aa-DetachedSearchButtonPlaceholder,.aa-DetachedSearchButton .aa-DetachedSearchButtonQuery{flex:1;white-space:nowrap}[data-theme=dark] .aa-DetachedSearchButton,[data-theme=dark] .aa-Footer{--key-shadow:inset 0 -2px 0 0 #282d55,inset 0 0 1px 1px #51577d,0 2px 2px 0 #0304094d;--key-gradient:linear-gradient(-26.5deg,#565872,#31355b)}.navbar .aa-DetachedSearchButton{height:36px;width:200px}.aa-DetachedContainer .aa-PanelLayout{bottom:var(--aa-footer-height)}.aa-NoResults{font-size:.9em;justify-content:center;padding:2rem}.aa-Footer,.aa-NoResults{align-items:center;color:var(--ifm-color-emphasis-600);display:flex}.aa-Footer{background-color:rgba(var(--aa-background-color-rgb),var(--aa-background-color-alpha));border-top:1px solid 
rgba(var(--aa-panel-border-color-rgb),var(--aa-panel-border-color-alpha));bottom:0;font-size:.8rem;height:var(--aa-footer-height);justify-content:space-between;padding:1rem;position:absolute;user-select:none;width:100%}.aa-Footer:first-child{border-top:0}.aa-DetachedContainer.aa-DetachedContainer--modal .aa-Footer{position:static}ul.aa-FooterCommands{display:flex;gap:1rem;margin:0;padding:0}ul.aa-FooterCommands li{display:inherit}.aa-FooterCommands kbd{align-items:center;border:0;border-radius:2px;display:flex;justify-content:center;margin-right:.4em;padding:0 0 1px}a.aa-FooterSearchCredit{--ifm-link-hover-color:inherit;--ifm-link-hover-decoration:none;align-items:center;color:inherit;display:flex;gap:1ch}a.aa-FooterSearchCredit svg{min-width:8em}body:not(.navigation-with-keyboard) :not(input):focus{outline:0}#__docusaurus-base-url-issue-banner-container,.aa-ClearButton[hidden],.aa-ItemContent:empty,.aa-LoadingIndicator[hidden],.aa-Source:empty,.aa-SourceHeader:empty,.themedImage_ToTc,[data-theme=dark] .lightToggleIcon_pyhR,[data-theme=light] .darkToggleIcon_wfgR,html[data-announcement-bar-initially-dismissed=true] .announcementBar_mb4j{display:none}.skipToContent_fXgn{background-color:var(--ifm-background-surface-color);color:var(--ifm-color-emphasis-900);left:100%;padding:calc(var(--ifm-global-spacing)/2) var(--ifm-global-spacing);position:fixed;top:1rem;z-index:calc(var(--ifm-z-index-fixed) + 1)}.skipToContent_fXgn:focus{box-shadow:var(--ifm-global-shadow-md);left:1rem}.closeButton_CVFx{line-height:0;padding:0}.content_knG7{font-size:85%;padding:5px 0;text-align:center}.content_knG7 a{color:inherit;text-decoration:underline}.announcementBar_mb4j{align-items:center;background-color:var(--ifm-color-white);border-bottom:1px solid var(--ifm-color-emphasis-100);color:var(--ifm-color-black);display:flex;height:var(--docusaurus-announcement-bar-height)}.announcementBarPlaceholder_vyr4{flex:0 0 10px}.announcementBarClose_gvF7{align-self:stretch;flex:0 0 
30px}.toggle_vylO{height:2rem;width:2rem}.aa-Form,.toggleButton_gllP{align-items:center;width:100%;display:flex}.toggleButton_gllP{border-radius:50%;height:100%;justify-content:center;transition:background var(--ifm-transition-fast)}.toggleButtonDisabled_aARS{cursor:not-allowed}.darkNavbarColorModeToggle_X3D1:hover{background:var(--ifm-color-gray-800)}[data-theme=dark] .themedImage--dark_i4oU,[data-theme=light] .themedImage--light_HNdA{display:initial}.iconExternalLink_nPIU{margin-left:.3rem}.iconLanguage_nlXk{margin-right:5px;vertical-align:text-bottom}body.dark,body[data-theme=dark]{--aa-text-color-rgb:183,192,199;--aa-primary-color-rgb:146,138,255;--aa-muted-color-rgb:146,138,255;--aa-input-background-color-rgb:0,3,9;--aa-background-color-rgb:21,24,42;--aa-selected-color-rgb:146,138,255;--aa-selected-color-alpha:0.25;--aa-description-highlight-background-color-rgb:0 255 255;--aa-description-highlight-background-color-alpha:0.25;--aa-panel-shadow:inset 1px 1px 0 0 #2c2e40,0 3px 8px 0 #000309;--aa-scrollbar-track-background-color-rgb:44,46,64}.aa-Autocomplete,.aa-DetachedFormContainer,.aa-Panel{color:#262627;color:rgba(var(--aa-text-color-rgb),var(--aa-text-color-alpha));font-family:inherit;font-family:var(--aa-font-family);font-size:16px;font-size:var(--aa-font-size);font-weight:400;line-height:1em;margin:0;padding:0;text-align:left}.aa-Form{background-color:#fff;background-color:rgba(var(--aa-input-background-color-rgb),var(--aa-input-background-color-alpha));border:1px solid #807ea3cc;border:1px solid rgba(var(--aa-input-border-color-rgb),var(--aa-input-border-color-alpha));border-radius:3px;line-height:1em;margin:0;position:relative}.aa-ClearButton,.aa-Input,.aa-SubmitButton{border:0;background:none}.aa-Form:focus-within{border-color:#3e34d3;border-color:rgba(var(--aa-primary-color-rgb),1);box-shadow:0 0 0 2px #3e34d333,inset 0 0 0 2px #3e34d333;box-shadow:rgba(var(--aa-primary-color-rgb),var(--aa-primary-color-alpha)) 0 0 0 2px,inset 
rgba(var(--aa-primary-color-rgb),var(--aa-primary-color-alpha)) 0 0 0 2px;outline:currentColor}.aa-InputWrapperPrefix{align-items:center;display:flex;flex-shrink:0;height:44px;height:var(--aa-search-input-height);order:1}.aa-Label,.aa-LoadingIndicator{cursor:auto;flex-shrink:0;height:100%;padding:0;text-align:left}.aa-Label svg,.aa-LoadingIndicator svg{stroke-width:1.6;stroke-width:var(--aa-icon-stroke-width);color:#3e34d3;color:rgba(var(--aa-primary-color-rgb),1);height:auto;max-height:20px;max-height:var(--aa-input-icon-size);width:20px;width:var(--aa-input-icon-size)}.aa-LoadingIndicator,.aa-SubmitButton{height:100%;padding-left:11px;padding-left:calc(var(--aa-spacing)*.75 - 1px);padding-right:8px;padding-right:var(--aa-spacing-half);width:47px;width:calc(var(--aa-spacing)*1.75 + var(--aa-icon-size) - 1px)}.aa-SubmitButton{-webkit-appearance:none;appearance:none;margin:0}.aa-InputWrapper{order:3;position:relative;width:100%}.aa-Input{-webkit-appearance:none;appearance:none;color:#262627;color:rgba(var(--aa-text-color-rgb),var(--aa-text-color-alpha));font:inherit;height:44px;height:var(--aa-search-input-height);padding:0;width:100%}.aa-Input::placeholder{color:#807ea399;color:rgba(var(--aa-muted-color-rgb),var(--aa-muted-color-alpha));opacity:1}.aa-Input:focus{border-color:none;box-shadow:none;outline:0}.aa-Input::-webkit-search-cancel-button,.aa-Input::-webkit-search-decoration,.aa-Input::-webkit-search-results-button,.aa-Input::-webkit-search-results-decoration{-webkit-appearance:none;appearance:none}.aa-InputWrapperSuffix{align-items:center;display:flex;height:44px;height:var(--aa-search-input-height);order:4}.aa-ClearButton{align-items:center;color:#807ea399;color:rgba(var(--aa-muted-color-rgb),var(--aa-muted-color-alpha));cursor:pointer;display:flex;height:100%;margin:0;padding:0 12.83328px;padding:0 calc(var(--aa-spacing)*.83333 - 
.5px)}.aa-Item,.aa-ItemIcon{align-items:center;border-radius:3px}.aa-ClearButton:focus,.aa-ClearButton:hover,.aa-ItemActionButton:focus svg,.aa-ItemActionButton:hover svg{color:#262627;color:rgba(var(--aa-text-color-rgb),var(--aa-text-color-alpha))}.aa-ClearButton svg{stroke-width:1.6;stroke-width:var(--aa-icon-stroke-width);width:20px;width:var(--aa-icon-size)}.aa-Panel{background-color:#fff;background-color:rgba(var(--aa-background-color-rgb),var(--aa-background-color-alpha));border-radius:4px;border-radius:calc(var(--aa-spacing)/4);box-shadow:0 0 0 1px #23263b1a,0 6px 16px -4px #23263b26;box-shadow:var(--aa-panel-shadow);margin:8px 0 0;overflow:hidden;position:absolute;transition:opacity .2s ease-in,filter .2s ease-in}.aa-Panel button{-webkit-appearance:none;appearance:none;background:none;border:0;margin:0;padding:0}.aa-PanelLayout{height:100%;margin:0;max-height:650px;max-height:var(--aa-panel-max-height);overflow-y:auto;padding:0;position:relative;text-align:left}.aa-PanelLayoutColumns--twoGolden{display:grid;grid-template-columns:39.2% auto;overflow:hidden;padding:0}.aa-PanelLayoutColumns--two{display:grid;grid-template-columns:repeat(2,minmax(0,1fr));overflow:hidden;padding:0}.aa-PanelLayoutColumns--three{display:grid;grid-template-columns:repeat(3,minmax(0,1fr));overflow:hidden;padding:0}.aa-Panel--stalled .aa-Source{filter:grayscale(1);opacity:.8}.aa-Panel--scrollable{margin:0;max-height:650px;max-height:var(--aa-panel-max-height);overflow-x:hidden;overflow-y:auto;padding:8px;padding:var(--aa-spacing-half);scrollbar-color:#fff #eaeaea;scrollbar-color:rgba(var(--aa-scrollbar-thumb-background-color-rgb),var(--aa-scrollbar-thumb-background-color-alpha)) rgba(var(--aa-scrollbar-track-background-color-rgb),var(--aa-scrollbar-track-background-color-alpha));scrollbar-width:thin}.sidebar_re4s,.tableOfContents_bqdL{max-height:calc(100vh - var(--ifm-navbar-height) - 
2rem)}.aa-Panel--scrollable::-webkit-scrollbar{width:13px;width:var(--aa-scrollbar-width)}.aa-Panel--scrollable::-webkit-scrollbar-track{background-color:#eaeaea;background-color:rgba(var(--aa-scrollbar-track-background-color-rgb),var(--aa-scrollbar-track-background-color-alpha))}.aa-Panel--scrollable::-webkit-scrollbar-thumb{background-color:#fff;background-color:rgba(var(--aa-scrollbar-thumb-background-color-rgb),var(--aa-scrollbar-thumb-background-color-alpha));border:3px solid #eaeaea;border:3px solid rgba(var(--aa-scrollbar-track-background-color-rgb),var(--aa-scrollbar-track-background-color-alpha));border-radius:9999px;border-right-width:2px}.aa-Source{margin:0;padding:0;position:relative;width:100%}.aa-SourceNoResults{font-size:1em;margin:0;padding:16px;padding:var(--aa-spacing)}.aa-List{margin:0}.aa-List,.aa-SourceHeader{padding:0;position:relative}.aa-SourceHeader{margin:8px .5em 8px 0;margin:var(--aa-spacing-half) .5em var(--aa-spacing-half) 0}.aa-SourceHeaderTitle{background:#fff;background:rgba(var(--aa-background-color-rgb),var(--aa-background-color-alpha));color:#3e34d3;color:rgba(var(--aa-primary-color-rgb),1);display:inline-block;font-size:.8em;font-weight:600;font-weight:var(--aa-font-weight-semibold);margin:0;padding:0 8px 0 0;padding:0 var(--aa-spacing-half) 0 0;position:relative;z-index:9999;z-index:var(--aa-base-z-index)}.aa-SourceHeaderLine{border-bottom:1px solid #3e34d3;border-bottom:1px solid rgba(var(--aa-primary-color-rgb),1);display:block;height:2px;left:0;margin:0;opacity:.3;padding:0;position:absolute;right:0;top:8px;top:var(--aa-spacing-half);z-index:9998;z-index:calc(var(--aa-base-z-index) - 1)}.aa-SourceFooterSeeAll{background:linear-gradient(180deg,#fff,#807ea324);background:linear-gradient(180deg,rgba(var(--aa-background-color-rgb),var(--aa-background-color-alpha)),#807ea324);border:1px solid #807ea399;border:1px solid rgba(var(--aa-muted-color-rgb),var(--aa-muted-color-alpha));border-radius:5px;box-shadow:inset 0 0 2px #fff,0 
2px 2px -1px #4c455826;color:inherit;font-size:.95em;font-weight:500;font-weight:var(--aa-font-weight-medium);padding:.475em 1em .6em;-webkit-text-decoration:none;text-decoration:none}.aa-SourceFooterSeeAll:focus,.aa-SourceFooterSeeAll:hover{border:1px solid #3e34d3;border:1px solid rgba(var(--aa-primary-color-rgb),1);color:#3e34d3;color:rgba(var(--aa-primary-color-rgb),1)}.aa-Item{cursor:pointer;display:grid;min-height:40px;min-height:calc(var(--aa-spacing)*2.5);padding:4px;padding:calc(var(--aa-spacing-half)/2)}.aa-Item[aria-selected=true]{background-color:rgba(179,173,214,.205);background-color:rgba(var(--aa-selected-color-rgb),var(--aa-selected-color-alpha))}.aa-Item[aria-selected=true] .aa-ActiveOnly,.aa-Item[aria-selected=true] .aa-ItemActionButton{visibility:visible}.aa-ItemIcon{stroke-width:1.6;stroke-width:var(--aa-icon-stroke-width);background:#fff;background:rgba(var(--aa-background-color-rgb),var(--aa-background-color-alpha));box-shadow:inset 0 0 0 1px #807ea34d;box-shadow:inset 0 0 0 1px rgba(var(--aa-panel-border-color-rgb),var(--aa-panel-border-color-alpha));color:#7777a3;color:rgba(var(--aa-icon-color-rgb),var(--aa-icon-color-alpha));display:flex;flex-shrink:0;font-size:.7em;height:28px;height:calc(var(--aa-icon-size) + var(--aa-spacing-half));justify-content:center;overflow:hidden;text-align:center;width:28px;width:calc(var(--aa-icon-size) + var(--aa-spacing-half))}.aa-ItemIcon img{height:auto;max-height:20px;max-height:calc(var(--aa-icon-size) + var(--aa-spacing-half) - 8px);max-width:20px;max-width:calc(var(--aa-icon-size) + var(--aa-spacing-half) - 8px);width:auto}.aa-ItemIcon svg{height:20px;height:var(--aa-icon-size);width:20px;width:var(--aa-icon-size)}.aa-ItemIcon--alignTop{align-self:flex-start}.aa-ItemIcon--noBorder{background:none;box-shadow:none}.aa-ItemIcon--picture{height:96px;width:96px}.aa-ItemIcon--picture 
img{max-height:100%;max-width:100%;padding:8px;padding:var(--aa-spacing-half)}.aa-ItemContent{grid-gap:8px;grid-gap:var(--aa-spacing-half);align-items:center;cursor:pointer;display:grid;gap:8px;gap:var(--aa-spacing-half);grid-auto-flow:column;line-height:1.25em;overflow:hidden}.aa-ItemContent mark{background:none;color:#262627;color:rgba(var(--aa-text-color-rgb),var(--aa-text-color-alpha));font-style:normal;font-weight:700;font-weight:var(--aa-font-weight-bold)}.aa-ItemContent--dual{display:flex;flex-direction:column;justify-content:space-between;text-align:left}.aa-ItemContent--dual .aa-ItemContentSubtitle,.aa-ItemContent--dual .aa-ItemContentTitle,.tocCollapsibleContent_vkbj a,[data-theme=dark] .dark_doK1,[data-theme=light] .light_GH8e{display:block}.aa-ItemContent--indented{padding-left:36px;padding-left:calc(var(--aa-icon-size) + var(--aa-spacing))}.aa-ItemContentBody{grid-gap:4px;grid-gap:calc(var(--aa-spacing-half)/2);display:grid;gap:4px;gap:calc(var(--aa-spacing-half)/2)}.aa-ItemContentTitle{display:inline-block;margin:0 .5em 0 0;max-width:100%;overflow:hidden;padding:0;text-overflow:ellipsis}.aa-ItemContentSubtitle{font-size:.92em}.aa-ItemContentSubtitleIcon:before{border-color:#807ea3a3;border-color:rgba(var(--aa-muted-color-rgb),.64);border-style:solid;content:"";display:inline-block;left:1px;position:relative;top:-3px}.aa-PanelFooter:after,.aa-PanelHeader:after{content:"";position:absolute;pointer-events:none;right:0;left:0}.aa-ItemContentSubtitle--inline .aa-ItemContentSubtitleIcon:before{border-width:0 0 1.5px;margin-left:8px;margin-left:var(--aa-spacing-half);margin-right:4px;margin-right:calc(var(--aa-spacing-half)/2);width:10px;width:calc(var(--aa-spacing-half) + 
2px)}.aa-ItemContentSubtitle--standalone{grid-gap:8px;grid-gap:var(--aa-spacing-half);align-items:center;color:#262627;color:rgba(var(--aa-text-color-rgb),var(--aa-text-color-alpha));display:grid;gap:8px;gap:var(--aa-spacing-half);grid-auto-flow:column;justify-content:start}.aa-ItemContentSubtitle--standalone .aa-ItemContentSubtitleIcon:before{border-radius:0 0 0 3px;border-width:0 0 1.5px 1.5px;height:8px;height:var(--aa-spacing-half);width:8px;width:var(--aa-spacing-half)}.aa-ItemContentSubtitleCategory{color:#807ea3;color:rgba(var(--aa-muted-color-rgb),1);font-weight:500}.aa-ItemContentDescription{color:#262627;color:rgba(var(--aa-text-color-rgb),var(--aa-text-color-alpha));font-size:.85em;max-width:100%;overflow-x:hidden;text-overflow:ellipsis}.aa-DetachedSearchButtonPlaceholder[hidden],.aa-ItemContentDescription:empty,.docSidebarContainer_b6E3,.sidebarLogo_isFc,[data-theme=dark] .light_GH8e,[data-theme=light] .dark_doK1{display:none}.aa-ItemContentDescription mark{background:#f5df4d80;background:rgba(var(--aa-description-highlight-background-color-rgb),var(--aa-description-highlight-background-color-alpha));color:#262627;color:rgba(var(--aa-text-color-rgb),var(--aa-text-color-alpha));font-style:normal;font-weight:500;font-weight:var(--aa-font-weight-medium)}.aa-ItemContentDash{color:#807ea399;color:rgba(var(--aa-muted-color-rgb),var(--aa-muted-color-alpha));display:none;opacity:.4}.aa-ItemContentTag{background-color:#3e34d333;background-color:rgba(var(--aa-primary-color-rgb),var(--aa-primary-color-alpha));border-radius:3px;margin:0 .4em 0 0;padding:.08em .3em}.aa-ItemLink,.aa-ItemWrapper{grid-gap:4px;grid-gap:calc(var(--aa-spacing-half)/2);align-items:center;color:inherit;display:grid;gap:4px;gap:calc(var(--aa-spacing-half)/2);grid-auto-flow:column;justify-content:space-between;width:100%}.aa-ItemLink{color:inherit;-webkit-text-decoration:none;text-decoration:none}.aa-ItemActions{display:grid;grid-auto-flow:column;height:100%;justify-self:end;margin:0 
-5.33333px;margin:0 calc(var(--aa-spacing)/-3);padding:0 2px 0 0}.aa-ItemActionButton{align-items:center;background:none;border:0;color:#807ea399;color:rgba(var(--aa-muted-color-rgb),var(--aa-muted-color-alpha));cursor:pointer;display:flex;flex-shrink:0;padding:0}.aa-ItemActionButton svg{stroke-width:1.6;stroke-width:var(--aa-icon-stroke-width);color:#807ea399;color:rgba(var(--aa-muted-color-rgb),var(--aa-muted-color-alpha));margin:5.33333px;margin:calc(var(--aa-spacing)/3);width:20px;width:var(--aa-action-icon-size)}.aa-ActiveOnly{visibility:hidden}.aa-PanelHeader{align-items:center;background:#3e34d3;background:rgba(var(--aa-primary-color-rgb),1);color:#fff;display:grid;height:var(--aa-modal-header-height);margin:0;padding:8px 16px;padding:var(--aa-spacing-half) var(--aa-spacing);position:relative}.aa-PanelHeader:after{background-image:linear-gradient(#fff,#fff0);background-image:linear-gradient(rgba(var(--aa-background-color-rgb),1),rgba(var(--aa-background-color-rgb),0));bottom:-8px;bottom:calc(var(--aa-spacing-half)*-1);height:8px;height:var(--aa-spacing-half)}.aa-PanelFooter,.aa-PanelHeader:after{z-index:9999;z-index:var(--aa-base-z-index)}.aa-PanelFooter{background-color:#fff;background-color:rgba(var(--aa-background-color-rgb),var(--aa-background-color-alpha));box-shadow:inset 0 1px 0 #807ea34d;box-shadow:inset 0 1px 0 rgba(var(--aa-panel-border-color-rgb),var(--aa-panel-border-color-alpha));display:flex;justify-content:space-between;margin:0;padding:16px;padding:var(--aa-spacing);position:relative}.aa-PanelFooter:after{background-image:linear-gradient(#fff0,#807ea399);background-image:linear-gradient(rgba(var(--aa-background-color-rgb),0),rgba(var(--aa-muted-color-rgb),var(--aa-muted-color-alpha)));height:16px;height:var(--aa-spacing);opacity:.12;top:-16px;top:calc(var(--aa-spacing)*-1);z-index:9998;z-index:calc(var(--aa-base-z-index) - 
1)}.aa-DetachedContainer{background:#fff;background:rgba(var(--aa-background-color-rgb),var(--aa-background-color-alpha));bottom:0;box-shadow:0 0 0 1px #23263b1a,0 6px 16px -4px #23263b26;box-shadow:var(--aa-panel-shadow);display:flex;flex-direction:column;left:0;margin:0;overflow:hidden;padding:0;position:fixed;right:0;top:0;z-index:9999;z-index:var(--aa-base-z-index)}.aa-DetachedContainer:after{height:32px}.aa-DetachedContainer .aa-SourceHeader{margin:8px 0 8px 2px;margin:var(--aa-spacing-half) 0 var(--aa-spacing-half) 2px}.aa-DetachedContainer .aa-Panel{background-color:#fff;background-color:rgba(var(--aa-background-color-rgb),var(--aa-background-color-alpha));border-radius:0;box-shadow:none;flex-grow:1;margin:0;padding:0;position:relative}.aa-DetachedContainer .aa-PanelLayout{bottom:0;box-shadow:none;left:0;margin:0;max-height:none;overflow-y:auto;position:absolute;right:0;top:0;width:100%}.aa-DetachedFormContainer{border-bottom:1px solid #807ea34d;border-bottom:1px solid rgba(var(--aa-panel-border-color-rgb),var(--aa-panel-border-color-alpha));display:flex;flex-direction:row;justify-content:space-between;margin:0;padding:8px;padding:var(--aa-spacing-half)}.aa-DetachedCancelButton{background:none;border:0;border-radius:3px;color:#262627;color:rgba(var(--aa-text-color-rgb),var(--aa-text-color-alpha));cursor:pointer;font:inherit;margin:0 0 0 8px;margin:0 0 0 var(--aa-spacing-half);padding:0 8px;padding:0 var(--aa-spacing-half)}.aa-DetachedCancelButton:focus,.aa-DetachedCancelButton:hover{box-shadow:inset 0 0 0 1px #807ea34d;box-shadow:inset 0 0 0 1px rgba(var(--aa-panel-border-color-rgb),var(--aa-panel-border-color-alpha))}.aa-DetachedContainer--modal{border-radius:6px;bottom:inherit;height:auto;margin:0 auto;max-width:680px;max-width:var(--aa-detached-modal-max-width);position:absolute;top:3%}.aa-DetachedContainer--modal 
.aa-PanelLayout{max-height:500px;max-height:var(--aa-detached-modal-max-height);padding-bottom:8px;padding-bottom:var(--aa-spacing-half);position:static}.aa-DetachedSearchButton{align-items:center;background-color:#fff;background-color:rgba(var(--aa-input-background-color-rgb),var(--aa-input-background-color-alpha));border:1px solid #807ea3cc;border:1px solid rgba(var(--aa-input-border-color-rgb),var(--aa-input-border-color-alpha));border-radius:3px;color:#807ea399;color:rgba(var(--aa-muted-color-rgb),var(--aa-muted-color-alpha));cursor:pointer;display:flex;font:inherit;font-family:inherit;font-family:var(--aa-font-family);font-size:16px;font-size:var(--aa-font-size);height:44px;height:var(--aa-search-input-height);margin:0;padding:0 5.5px;padding:0 calc(var(--aa-search-input-height)/8);position:relative;text-align:left;width:100%}.aa-DetachedSearchButton:focus{border-color:#3e34d3;border-color:rgba(var(--aa-primary-color-rgb),1);box-shadow:0 0 0 3px #3e34d333,inset 0 0 0 2px #3e34d333;box-shadow:rgba(var(--aa-primary-color-rgb),var(--aa-primary-color-alpha)) 0 0 0 3px,inset rgba(var(--aa-primary-color-rgb),var(--aa-primary-color-alpha)) 0 0 0 2px;outline:currentColor}.aa-DetachedSearchButtonIcon{align-items:center;color:#3e34d3;color:rgba(var(--aa-primary-color-rgb),1);cursor:auto;display:flex;flex-shrink:0;height:100%;justify-content:center;width:36px;width:calc(var(--aa-icon-size) + var(--aa-spacing))}.aa-DetachedSearchButtonQuery{color:#262627;color:rgba(var(--aa-text-color-rgb),1);line-height:1.25em;overflow:hidden;text-overflow:ellipsis}.aa-Detached{height:100vh;overflow:hidden}.aa-DetachedOverlay{background-color:#73728166;background-color:rgba(var(--aa-overlay-color-rgb),var(--aa-overlay-color-alpha));height:100vh;left:0;margin:0;padding:0;position:fixed;right:0;top:0;z-index:9998;z-index:calc(var(--aa-base-z-index) - 
1)}.aa-GradientBottom,.aa-GradientTop{height:8px;height:var(--aa-spacing-half);left:0;pointer-events:none;position:absolute;right:0;z-index:9999;z-index:var(--aa-base-z-index)}.aa-GradientTop{background-image:linear-gradient(#fff,#fff0);background-image:linear-gradient(rgba(var(--aa-background-color-rgb),1),rgba(var(--aa-background-color-rgb),0));top:0}.aa-GradientBottom{background-image:linear-gradient(#fff0,#fff);background-image:linear-gradient(rgba(var(--aa-background-color-rgb),0),rgba(var(--aa-background-color-rgb),1));border-bottom-left-radius:4px;border-bottom-left-radius:calc(var(--aa-spacing)/4);border-bottom-right-radius:4px;border-bottom-right-radius:calc(var(--aa-spacing)/4);bottom:0}.navbarHideable_m1mJ{transition:transform var(--ifm-transition-fast) ease}.navbarHidden_jGov{transform:translate3d(0,calc(-100% - 2px),0)}.errorBoundaryError_a6uf{color:red;white-space:pre-wrap}.footerLogoLink_BH7S{opacity:.5;transition:opacity var(--ifm-transition-fast) var(--ifm-transition-timing-default)}.footerLogoLink_BH7S:hover,.hash-link:focus,:hover>.hash-link{opacity:1}.mainWrapper_z2l0{display:flex;flex:1 0 auto;flex-direction:column}.docusaurus-mt-lg{margin-top:3rem}#__docusaurus{display:flex;flex-direction:column;min-height:100%}.tableOfContentsInline_prmo ul{font-size:medium;list-style-type:disc;padding-top:0}.cardContainer_fWXF{--ifm-link-color:var(--ifm-color-emphasis-800);--ifm-link-hover-color:var(--ifm-color-emphasis-700);--ifm-link-hover-decoration:none;border:1px solid var(--ifm-color-emphasis-200);box-shadow:0 1.5px 3px 0 #00000026;transition:all var(--ifm-transition-fast) ease;transition-property:border,box-shadow}.cardContainer_fWXF:hover{border-color:var(--ifm-color-primary);box-shadow:0 3px 6px 0 
#0003}.cardTitle_rnsV{font-size:1.2rem}.cardDescription_PWke{font-size:.8rem}.backToTopButton_sjWU{background-color:var(--ifm-color-emphasis-200);border-radius:50%;bottom:1.3rem;box-shadow:var(--ifm-global-shadow-lw);height:3rem;opacity:0;position:fixed;right:1.3rem;transform:scale(0);transition:all var(--ifm-transition-fast) var(--ifm-transition-timing-default);visibility:hidden;width:3rem;z-index:calc(var(--ifm-z-index-fixed) - 1)}.backToTopButton_sjWU:after{background-color:var(--ifm-color-emphasis-1000);content:" ";display:inline-block;height:100%;-webkit-mask:var(--ifm-menu-link-sublist-icon) 50%/2rem 2rem no-repeat;mask:var(--ifm-menu-link-sublist-icon) 50%/2rem 2rem no-repeat;width:100%}.backToTopButtonShow_xfvO{opacity:1;transform:scale(1);visibility:visible}[data-theme=dark]:root{--docusaurus-collapse-button-bg:#ffffff0d;--docusaurus-collapse-button-bg-hover:#ffffff1a}.collapseSidebarButton_PEFL{display:none;margin:0}.docMainContainer_gTbr,.docPage__5DB{display:flex;width:100%}.docPage__5DB{flex:1 0}.docsWrapper_BCFX{display:flex;flex:1 0 auto}.sidebar_re4s{overflow-y:auto;position:sticky;top:calc(var(--ifm-navbar-height) + 2rem)}.sidebarItemTitle_pO2u{font-size:var(--ifm-h3-font-size);font-weight:var(--ifm-font-weight-bold)}.container_mt6G,.sidebarItemList_Yudw{font-size:.9rem}.sidebarItem__DBe{margin-top:.7rem}.sidebarItemLink_mo7H{color:var(--ifm-font-color-base);display:block}.sidebarItemLinkActive_I1ZP{color:var(--ifm-color-primary)!important}.buttonGroup__atx button,.codeBlockContainer_Ckt0{background:var(--prism-background-color);color:var(--prism-color)}.authorCol_Hf19{flex-grow:1!important;max-width:inherit!important}.imageOnlyAuthorRow_pa_O{display:flex;flex-flow:row wrap}.imageOnlyAuthorCol_G86a{margin-left:.3rem;margin-right:.3rem}.features_t9lD,.video_xvMC{align-items:center;display:flex;justify-content:center;padding:2rem 
0;width:100%}.featureSvg_GfXr{height:200px;width:200px}.quotes_tXTu{display:flex;justify-content:space-between}.quote_UGhH{flex-basis:auto;width:33%}.video_xvMC iframe{height:20em}.video_xvMC iframe,.why_A8BT iframe{left:0;position:relative;top:0;width:100%}.why_A8BT iframe{height:25em}.heroBanner_qdFl{overflow:hidden;padding:2rem 0;position:relative;text-align:center}.codeBlockContainer_Ckt0{border-radius:var(--ifm-code-border-radius);box-shadow:var(--ifm-global-shadow-lw);margin-bottom:var(--ifm-leading)}.codeBlockContent_biex{border-radius:inherit;direction:ltr;position:relative}.codeBlockTitle_Ktv7{border-bottom:1px solid var(--ifm-color-emphasis-300);border-top-left-radius:inherit;border-top-right-radius:inherit;font-size:var(--ifm-code-font-size);font-weight:500;padding:.75rem var(--ifm-pre-padding)}.codeBlock_bY9V{--ifm-pre-background:var(--prism-background-color);margin:0;padding:0}.codeBlockTitle_Ktv7+.codeBlockContent_biex .codeBlock_bY9V{border-top-left-radius:0;border-top-right-radius:0}.codeBlockLines_e6Vv{float:left;font:inherit;min-width:100%;padding:var(--ifm-pre-padding)}.codeBlockLinesWithNumbering_o6Pm{display:table;padding:var(--ifm-pre-padding) 0}.buttonGroup__atx{column-gap:.2rem;display:flex;position:absolute;right:calc(var(--ifm-pre-padding)/2);top:calc(var(--ifm-pre-padding)/2)}.buttonGroup__atx button{align-items:center;border:1px solid var(--ifm-color-emphasis-300);border-radius:var(--ifm-global-radius);display:flex;line-height:0;opacity:0;padding:.4rem;transition:opacity var(--ifm-transition-fast) ease-in-out}.buttonGroup__atx button:focus-visible,.buttonGroup__atx button:hover{opacity:1!important}.theme-code-block:hover .buttonGroup__atx 
button{opacity:.4}.iconEdit_Z9Sw{margin-right:.3em;vertical-align:sub}:where(:root){--docusaurus-highlighted-code-line-bg:#484d5b}:where([data-theme=dark]){--docusaurus-highlighted-code-line-bg:#646464}.theme-code-block-highlighted-line{background-color:var(--docusaurus-highlighted-code-line-bg);display:block;margin:0 calc(var(--ifm-pre-padding)*-1);padding:0 var(--ifm-pre-padding)}.codeLine_lJS_{counter-increment:a;display:table-row}.codeLineNumber_Tfdd{background:var(--ifm-pre-background);display:table-cell;left:0;overflow-wrap:normal;padding:0 var(--ifm-pre-padding);position:sticky;text-align:right;width:1%}.codeLineNumber_Tfdd:before{content:counter(a);opacity:.4}.codeLineContent_feaV{padding-right:var(--ifm-pre-padding)}.tag_zVej{border:1px solid var(--docusaurus-tag-list-border);transition:border var(--ifm-transition-fast)}.tag_zVej:hover{--docusaurus-tag-list-border:var(--ifm-link-color);text-decoration:none}.tagRegular_sFm0{border-radius:var(--ifm-global-radius);font-size:90%;padding:.2rem .5rem .3rem}.tagWithCount_h2kH{align-items:center;border-left:0;display:flex;padding:0 .5rem 0 1rem;position:relative}.tagWithCount_h2kH:after,.tagWithCount_h2kH:before{border:1px solid var(--docusaurus-tag-list-border);content:"";position:absolute;top:50%;transition:inherit}.tagWithCount_h2kH:before{border-bottom:0;border-right:0;height:1.18rem;right:100%;transform:translate(50%,-50%) rotate(-45deg);width:1.18rem}.tagWithCount_h2kH:after{border-radius:50%;height:.5rem;left:0;transform:translateY(-50%);width:.5rem}.tagWithCount_h2kH span{background:var(--ifm-color-secondary);border-radius:var(--ifm-global-radius);color:var(--ifm-color-black);font-size:.7rem;line-height:1.2;margin-left:.3rem;padding:.1rem .4rem}.theme-code-block:hover 
.copyButtonCopied_obH4{opacity:1!important}.copyButtonIcons_eSgA{height:1.125rem;position:relative;width:1.125rem}.copyButtonIcon_y97N,.copyButtonSuccessIcon_LjdS{fill:currentColor;height:inherit;left:0;opacity:inherit;position:absolute;top:0;transition:all var(--ifm-transition-fast) ease;width:inherit}.copyButtonSuccessIcon_LjdS{color:#00d600;left:50%;opacity:0;top:50%;transform:translate(-50%,-50%) scale(.33)}.copyButtonCopied_obH4 .copyButtonIcon_y97N{opacity:0;transform:scale(.33)}.copyButtonCopied_obH4 .copyButtonSuccessIcon_LjdS{opacity:1;transform:translate(-50%,-50%) scale(1);transition-delay:75ms}.tags_jXut{display:inline}.tag_QGVx{display:inline-block;margin:0 .4rem .5rem 0}.lastUpdated_vwxv{font-size:smaller;font-style:italic;margin-top:.2rem}.tocCollapsibleButton_TO0P{align-items:center;display:flex;font-size:inherit;justify-content:space-between;padding:.4rem .8rem;width:100%}.tocCollapsibleButton_TO0P:after{background:var(--ifm-menu-link-sublist-icon) 50% 50%/2rem 2rem no-repeat;content:"";filter:var(--ifm-menu-link-sublist-icon-filter);height:1.25rem;transform:rotate(180deg);transition:transform var(--ifm-transition-fast);width:1.25rem}.tocCollapsibleButtonExpanded_MG3E:after,.tocCollapsibleExpanded_sAul{transform:none}.tocCollapsible_ETCw{background-color:var(--ifm-menu-color-background-active);border-radius:var(--ifm-global-radius);margin:1rem 0}.tocCollapsibleContent_vkbj>ul{border-left:none;border-top:1px solid var(--ifm-color-emphasis-300);font-size:15px;padding:.2rem 0}.tocCollapsibleContent_vkbj ul li{margin:.4rem .8rem}.wordWrapButtonIcon_Bwma{height:1.2rem;width:1.2rem}.details_lb9f{--docusaurus-details-summary-arrow-size:0.38rem;--docusaurus-details-transition:transform 200ms ease;--docusaurus-details-decoration-color:grey}.details_lb9f>summary{cursor:pointer;padding-left:1rem;position:relative}.details_lb9f>summary::-webkit-details-marker{display:none}.details_lb9f>summary:before{border-color:#0000 #0000 #0000 
var(--docusaurus-details-decoration-color);border-style:solid;border-width:var(--docusaurus-details-summary-arrow-size);content:"";left:0;position:absolute;top:.45rem;transform:rotate(0);transform-origin:calc(var(--docusaurus-details-summary-arrow-size)/2) 50%;transition:var(--docusaurus-details-transition)}.collapsibleContent_i85q{border-top:1px solid var(--docusaurus-details-decoration-color);margin-top:1rem;padding-top:1rem}.details_b_Ee{--docusaurus-details-decoration-color:var(--ifm-alert-border-color);--docusaurus-details-transition:transform var(--ifm-transition-fast) ease;border:1px solid var(--ifm-alert-border-color);margin:0 0 var(--ifm-spacing-vertical)}.anchorWithStickyNavbar_LWe7{scroll-margin-top:calc(var(--ifm-navbar-height) + .5rem)}.anchorWithHideOnScrollNavbar_WYt5{scroll-margin-top:.5rem}.hash-link{opacity:0;padding-left:.5rem;transition:opacity var(--ifm-transition-fast);-webkit-user-select:none;user-select:none}.hash-link:before{content:"#"}.img_ev3q{height:auto}.admonition_LlT9{margin-bottom:1em}.admonitionHeading_tbUL{font:var(--ifm-heading-font-weight) var(--ifm-h5-font-size)/var(--ifm-heading-line-height) var(--ifm-heading-font-family);margin-bottom:.3rem}.admonitionHeading_tbUL code{text-transform:none}.admonitionIcon_kALy{display:inline-block;margin-right:.4em;vertical-align:middle}.admonitionIcon_kALy svg{fill:var(--ifm-alert-foreground-color);display:inline-block;height:1.6em;width:1.6em}.blogPostFooterDetailsFull_mRVl{flex-direction:column}.tableOfContents_bqdL{overflow-y:auto;position:sticky;top:calc(var(--ifm-navbar-height) + 1rem)}.breadcrumbHomeIcon_YNFT{height:1.1rem;position:relative;top:1px;vertical-align:top;width:1.1rem}.breadcrumbsContainer_Z_bl{--ifm-breadcrumb-size-multiplier:0.8;margin-bottom:.8rem}.title_kItE{--ifm-h1-font-size:3rem;margin-bottom:calc(var(--ifm-leading)*1.25)}.mdxPageWrapper_j9I6{justify-content:center}@media 
(min-width:997px){.collapseSidebarButton_PEFL,.expandButton_m80_{background-color:var(--docusaurus-collapse-button-bg)}:root{--docusaurus-announcement-bar-height:30px}.announcementBarClose_gvF7,.announcementBarPlaceholder_vyr4{flex-basis:50px}.searchBox_ZlJk{padding:var(--ifm-navbar-item-padding-vertical) var(--ifm-navbar-item-padding-horizontal)}.collapseSidebarButton_PEFL{border:1px solid var(--ifm-toc-border-color);border-radius:0;bottom:0;display:block!important;height:40px;position:sticky}.collapseSidebarButtonIcon_kv0_{margin-top:4px;transform:rotate(180deg)}.expandButtonIcon_BlDH,[dir=rtl] .collapseSidebarButtonIcon_kv0_{transform:rotate(0)}.collapseSidebarButton_PEFL:focus,.collapseSidebarButton_PEFL:hover,.expandButton_m80_:focus,.expandButton_m80_:hover{background-color:var(--docusaurus-collapse-button-bg-hover)}.menuHtmlItem_M9Kj{padding:var(--ifm-menu-link-padding-vertical) var(--ifm-menu-link-padding-horizontal)}.menu_SIkG{flex-grow:1;padding:.5rem}@supports (scrollbar-gutter:stable){.menu_SIkG{padding:.5rem 0 .5rem .5rem;scrollbar-gutter:stable}}.menuWithAnnouncementBar_GW3s{margin-bottom:var(--docusaurus-announcement-bar-height)}.sidebar_njMd{display:flex;flex-direction:column;height:100%;padding-top:var(--ifm-navbar-height);width:var(--doc-sidebar-width)}.sidebarWithHideableNavbar_wUlq{padding-top:0}.sidebarHidden_VK0M{opacity:0;visibility:hidden}.sidebarLogo_isFc{align-items:center;color:inherit!important;display:flex!important;margin:0 var(--ifm-navbar-padding-horizontal);max-height:var(--ifm-navbar-height);min-height:var(--ifm-navbar-height);text-decoration:none!important}.sidebarLogo_isFc img{height:2rem;margin-right:.5rem}.expandButton_m80_{align-items:center;display:flex;height:100%;justify-content:center;position:absolute;right:0;top:0;transition:background-color var(--ifm-transition-fast) ease;width:100%}[dir=rtl] .expandButtonIcon_BlDH{transform:rotate(180deg)}.docSidebarContainer_b6E3{border-right:1px solid 
var(--ifm-toc-border-color);-webkit-clip-path:inset(0);clip-path:inset(0);display:block;margin-top:calc(var(--ifm-navbar-height)*-1);transition:width var(--ifm-transition-fast) ease;width:var(--doc-sidebar-width);will-change:width}.docSidebarContainerHidden_b3ry{cursor:pointer;width:var(--doc-sidebar-hidden-width)}.sidebarViewport_Xe31{height:100%;max-height:100vh;position:sticky;top:0}.docMainContainer_gTbr{flex-grow:1;max-width:calc(100% - var(--doc-sidebar-width))}.docMainContainerEnhanced_Uz_u{max-width:calc(100% - var(--doc-sidebar-hidden-width))}.docItemWrapperEnhanced_czyv{max-width:calc(var(--ifm-container-width) + var(--doc-sidebar-width))!important}.lastUpdated_vwxv{text-align:right}.tocMobile_ITEo{display:none}.docItemCol_VOVn,.generatedIndexPage_vN6x{max-width:75%!important}.list_eTzJ article:nth-last-child(-n+2){margin-bottom:0!important}}@media (min-width:1440px){.container{max-width:var(--ifm-container-width-xl)}}@media (max-width:996px){.col{--ifm-col-width:100%;flex-basis:var(--ifm-col-width);margin-left:0}.footer{--ifm-footer-padding-horizontal:0}.colorModeToggle_DEke,.footer__link-separator,.navbar__item,.sidebar_re4s,.tableOfContents_bqdL{display:none}.footer__col{margin-bottom:calc(var(--ifm-spacing-vertical)*3)}.footer__link-item{display:block}.hero{padding-left:0;padding-right:0}.navbar>.container,.navbar>.container-fluid{padding:0}.navbar__toggle{display:inherit}.navbar__search-input{width:9rem}.pills--block,.tabs--block{flex-direction:column}.searchBox_ZlJk{position:absolute;right:var(--ifm-navbar-padding-horizontal)}.docItemContainer_F8PC{padding:0 .3rem}}@media screen and (max-width:996px){.container__quotes .card,.container__quotes .card__header,.quote_UGhH .card_V6JQ{height:auto}.quotes_tXTu{flex-direction:column;padding:0 1em}.quote_UGhH{margin-bottom:1em;width:100%}.heroBanner_qdFl{padding:2rem}}@media (max-width:576px){.markdown 
h1:first-child{--ifm-h1-font-size:2rem}.markdown>h2{--ifm-h2-font-size:1.5rem}.markdown>h3{--ifm-h3-font-size:1.25rem}.title_f1Hy{font-size:2rem}}@media (hover:hover){.backToTopButton_sjWU:hover{background-color:var(--ifm-color-emphasis-300)}.aa-TouchOnly{display:none}}@media (hover:none) and (pointer:coarse){:root{--aa-spacing-factor:1.2;--aa-action-icon-size:22px}.aa-LoadingIndicator,.aa-SubmitButton{padding-left:3px;padding-left:calc(var(--aa-spacing-half)/ 2 - 1px);width:39px;width:calc(var(--aa-icon-size) + var(--aa-spacing)*1.25 - 1px)}.aa-ClearButton{padding:0 10.16672px;padding:0 calc(var(--aa-spacing)*.66667 - .5px)}.aa-ItemActionButton:focus svg,.aa-ItemActionButton:hover svg{color:inherit}.aa-DesktopOnly{display:none}}@media (pointer:fine){.thin-scrollbar{scrollbar-width:thin}.thin-scrollbar::-webkit-scrollbar{height:var(--ifm-scrollbar-size);width:var(--ifm-scrollbar-size)}.thin-scrollbar::-webkit-scrollbar-track{background:var(--ifm-scrollbar-track-background-color);border-radius:10px}.thin-scrollbar::-webkit-scrollbar-thumb{background:var(--ifm-scrollbar-thumb-background-color);border-radius:10px}.thin-scrollbar::-webkit-scrollbar-thumb:hover{background:var(--ifm-scrollbar-thumb-hover-background-color)}}@media (prefers-reduced-motion:reduce){:root{--ifm-transition-fast:0ms;--ifm-transition-slow:0ms}}@media screen and (prefers-reduced-motion){.aa-Panel{transition:none}}@media print{.announcementBar_mb4j,.footer,.menu,.navbar,.pagination-nav,.table-of-contents,.tocMobile_ITEo{display:none}.tabs{page-break-inside:avoid}.codeBlockLines_e6Vv{white-space:pre-wrap}} \ No newline at end of file diff --git a/assets/images/app-workspace-static-deployed-76f8e3279aa037894d45647b7528261e.png b/assets/images/app-workspace-static-deployed-76f8e3279aa037894d45647b7528261e.png new file mode 100644 index 00000000000..ec9d165785d Binary files /dev/null and b/assets/images/app-workspace-static-deployed-76f8e3279aa037894d45647b7528261e.png differ diff --git 
a/assets/images/cloud-apps-empty-6b8f78009b9da6a74e8f03754b50c9c0.png b/assets/images/cloud-apps-empty-6b8f78009b9da6a74e8f03754b50c9c0.png new file mode 100644 index 00000000000..e9c149352eb Binary files /dev/null and b/assets/images/cloud-apps-empty-6b8f78009b9da6a74e8f03754b50c9c0.png differ diff --git a/assets/images/compose-openapi-336d2ae348d9fc45815c099b25e9b7ed.png b/assets/images/compose-openapi-336d2ae348d9fc45815c099b25e9b7ed.png new file mode 100644 index 00000000000..831c0e28c8c Binary files /dev/null and b/assets/images/compose-openapi-336d2ae348d9fc45815c099b25e9b7ed.png differ diff --git a/assets/images/frontend-screenshot-1-f022d8dda7cf260804f2993653ef2672.jpg b/assets/images/frontend-screenshot-1-f022d8dda7cf260804f2993653ef2672.jpg new file mode 100644 index 00000000000..345b7be85c5 Binary files /dev/null and b/assets/images/frontend-screenshot-1-f022d8dda7cf260804f2993653ef2672.jpg differ diff --git a/assets/images/frontend-screenshot-2-7063f363ce4395cfcbe7bfc7b14f27aa.jpg b/assets/images/frontend-screenshot-2-7063f363ce4395cfcbe7bfc7b14f27aa.jpg new file mode 100644 index 00000000000..be7ec8c6d0c Binary files /dev/null and b/assets/images/frontend-screenshot-2-7063f363ce4395cfcbe7bfc7b14f27aa.jpg differ diff --git a/assets/images/frontend-screenshot-3-bd3cbacdf584cab7377be119f5986669.jpg b/assets/images/frontend-screenshot-3-bd3cbacdf584cab7377be119f5986669.jpg new file mode 100644 index 00000000000..1774ffadf7d Binary files /dev/null and b/assets/images/frontend-screenshot-3-bd3cbacdf584cab7377be119f5986669.jpg differ diff --git a/assets/images/frontend-screenshot-4-e22c1f77cabb7e952ff8ea437349da4c.jpg b/assets/images/frontend-screenshot-4-e22c1f77cabb7e952ff8ea437349da4c.jpg new file mode 100644 index 00000000000..264c18ea46b Binary files /dev/null and b/assets/images/frontend-screenshot-4-e22c1f77cabb7e952ff8ea437349da4c.jpg differ diff --git a/assets/images/github-pr-deploy-comment-720f81cbacab20ce77fbd20206dafd51.png 
b/assets/images/github-pr-deploy-comment-720f81cbacab20ce77fbd20206dafd51.png new file mode 100644 index 00000000000..7db8c2ceeed Binary files /dev/null and b/assets/images/github-pr-deploy-comment-720f81cbacab20ce77fbd20206dafd51.png differ diff --git a/assets/images/github-pr-deploy-in-progress-95105f47cd34c895071ef4cca94754aa.png b/assets/images/github-pr-deploy-in-progress-95105f47cd34c895071ef4cca94754aa.png new file mode 100644 index 00000000000..a115a640646 Binary files /dev/null and b/assets/images/github-pr-deploy-in-progress-95105f47cd34c895071ef4cca94754aa.png differ diff --git a/assets/images/hello-json-response-f52762d42d5844974b2a37d050c328a3.png b/assets/images/hello-json-response-f52762d42d5844974b2a37d050c328a3.png new file mode 100644 index 00000000000..189af13f083 Binary files /dev/null and b/assets/images/hello-json-response-f52762d42d5844974b2a37d050c328a3.png differ diff --git a/assets/images/http-16e850f4929a4d5c21a629cbe609b67e.png b/assets/images/http-16e850f4929a4d5c21a629cbe609b67e.png new file mode 100644 index 00000000000..f5221d759d7 Binary files /dev/null and b/assets/images/http-16e850f4929a4d5c21a629cbe609b67e.png differ diff --git a/assets/images/jaeger-1-24aa07f749ff6a07f71cf97b0fee13c2.png b/assets/images/jaeger-1-24aa07f749ff6a07f71cf97b0fee13c2.png new file mode 100644 index 00000000000..dc75fa2ce03 Binary files /dev/null and b/assets/images/jaeger-1-24aa07f749ff6a07f71cf97b0fee13c2.png differ diff --git a/assets/images/jaeger-2-4dc771655ab5c0ded1490676a9fc3fd9.png b/assets/images/jaeger-2-4dc771655ab5c0ded1490676a9fc3fd9.png new file mode 100644 index 00000000000..2c278385ad9 Binary files /dev/null and b/assets/images/jaeger-2-4dc771655ab5c0ded1490676a9fc3fd9.png differ diff --git a/assets/images/jaeger-3-ca388386c93e10b0833acbb652d9f4ff.png b/assets/images/jaeger-3-ca388386c93e10b0833acbb652d9f4ff.png new file mode 100644 index 00000000000..d51fbeb5d05 Binary files /dev/null and 
b/assets/images/jaeger-3-ca388386c93e10b0833acbb652d9f4ff.png differ diff --git a/assets/images/jwt-f72781533567349fb71fc37b9768e847.png b/assets/images/jwt-f72781533567349fb71fc37b9768e847.png new file mode 100644 index 00000000000..d1ff1a5efcd Binary files /dev/null and b/assets/images/jwt-f72781533567349fb71fc37b9768e847.png differ diff --git a/assets/images/platformatic-architecture-373095091e8fc6f88cd44e1503ecc8ed.png b/assets/images/platformatic-architecture-373095091e8fc6f88cd44e1503ecc8ed.png new file mode 100644 index 00000000000..e19a8cdd18f Binary files /dev/null and b/assets/images/platformatic-architecture-373095091e8fc6f88cd44e1503ecc8ed.png differ diff --git a/assets/images/platformatic-db-swagger-ui-fc0f45a8422fcb6a96e05f0618b8b72d.png b/assets/images/platformatic-db-swagger-ui-fc0f45a8422fcb6a96e05f0618b8b72d.png new file mode 100644 index 00000000000..c70726b4eda Binary files /dev/null and b/assets/images/platformatic-db-swagger-ui-fc0f45a8422fcb6a96e05f0618b8b72d.png differ diff --git a/assets/images/webhook-35d01c3e23ac75c26c9d519daa16cf1b.png b/assets/images/webhook-35d01c3e23ac75c26c9d519daa16cf1b.png new file mode 100644 index 00000000000..a22b7235834 Binary files /dev/null and b/assets/images/webhook-35d01c3e23ac75c26c9d519daa16cf1b.png differ diff --git a/assets/images/workspace-create-dynamic-47b784d30bbdcfd4eb9da2dc224bae1a.png b/assets/images/workspace-create-dynamic-47b784d30bbdcfd4eb9da2dc224bae1a.png new file mode 100644 index 00000000000..231b0fc0282 Binary files /dev/null and b/assets/images/workspace-create-dynamic-47b784d30bbdcfd4eb9da2dc224bae1a.png differ diff --git a/assets/js/0001790c.e2c5cad7.js b/assets/js/0001790c.e2c5cad7.js new file mode 100644 index 00000000000..47869e0d726 --- /dev/null +++ b/assets/js/0001790c.e2c5cad7.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkplatformatic_oss_website=self.webpackChunkplatformatic_oss_website||[]).push([[26683],{3905:(e,t,n)=>{n.d(t,{Zo:()=>c,kt:()=>d});var r=n(67294);function 
a(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function i(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);t&&(r=r.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,r)}return n}function o(e){for(var t=1;t=0||(a[n]=e[n]);return a}(e,t);if(Object.getOwnPropertySymbols){var i=Object.getOwnPropertySymbols(e);for(r=0;r=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(a[n]=e[n])}return a}var p=r.createContext({}),s=function(e){var t=r.useContext(p),n=t;return e&&(n="function"==typeof e?e(t):o(o({},t),e)),n},c=function(e){var t=s(e.components);return r.createElement(p.Provider,{value:t},e.children)},u="mdxType",f={inlineCode:"code",wrapper:function(e){var t=e.children;return r.createElement(r.Fragment,{},t)}},m=r.forwardRef((function(e,t){var n=e.components,a=e.mdxType,i=e.originalType,p=e.parentName,c=l(e,["components","mdxType","originalType","parentName"]),u=s(n),m=a,d=u["".concat(p,".").concat(m)]||u[m]||f[m]||i;return n?r.createElement(d,o(o({ref:t},c),{},{components:n})):r.createElement(d,o({ref:t},c))}));function d(e,t){var n=arguments,a=t&&t.mdxType;if("string"==typeof e||a){var i=n.length,o=new Array(i);o[0]=m;var l={};for(var p in t)hasOwnProperty.call(t,p)&&(l[p]=t[p]);l.originalType=e,l[u]="string"==typeof e?e:a,o[1]=l;for(var s=2;s{n.r(t),n.d(t,{assets:()=>p,contentTitle:()=>o,default:()=>f,frontMatter:()=>i,metadata:()=>l,toc:()=>s});var r=n(87462),a=(n(67294),n(3905));const i={},o="Fastify Plugin",l={unversionedId:"reference/sql-events/fastify-plugin",id:"version-0.42.1/reference/sql-events/fastify-plugin",title:"Fastify Plugin",description:"The @platformatic/sql-events package exports a Fastify plugin that can be used out-of the box in a server 
application.",source:"@site/versioned_docs/version-0.42.1/reference/sql-events/fastify-plugin.md",sourceDirName:"reference/sql-events",slug:"/reference/sql-events/fastify-plugin",permalink:"/docs/reference/sql-events/fastify-plugin",draft:!1,editUrl:"https://github.com/platformatic/oss/edit/main/versioned_docs/version-0.42.1/reference/sql-events/fastify-plugin.md",tags:[],version:"0.42.1",frontMatter:{},sidebar:"docs",previous:{title:"Introduction to the sql-events module",permalink:"/docs/reference/sql-events/introduction"},next:{title:"Platformatic Cloud",permalink:"/docs/category/platformatic-cloud"}},p={},s=[{value:"Usage",id:"usage",level:4}],c={toc:s},u="wrapper";function f(e){let{components:t,...n}=e;return(0,a.kt)(u,(0,r.Z)({},c,n,{components:t,mdxType:"MDXLayout"}),(0,a.kt)("h1",{id:"fastify-plugin"},"Fastify Plugin"),(0,a.kt)("p",null,"The ",(0,a.kt)("inlineCode",{parentName:"p"},"@platformatic/sql-events")," package exports a ",(0,a.kt)("a",{parentName:"p",href:"https://fastify.io"},"Fastify")," plugin that can be used out-of the box in a server application.\nIt requires that ",(0,a.kt)("inlineCode",{parentName:"p"},"@platformatic/sql-mapper")," is registered before it."),(0,a.kt)("p",null,"The plugin has the following options:"),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("inlineCode",{parentName:"li"},"mq")," - an instance of ",(0,a.kt)("a",{parentName:"li",href:"https://npm.im/mqemitter"},(0,a.kt)("inlineCode",{parentName:"a"},"mqemitter")),", optional.")),(0,a.kt)("p",null,"The plugin adds the following properties to the ",(0,a.kt)("inlineCode",{parentName:"p"},"app.platformatic")," object:"),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("inlineCode",{parentName:"li"},"mq")," \u2014 an instance of ",(0,a.kt)("a",{parentName:"li",href:"https://npm.im/mqemitter"},(0,a.kt)("inlineCode",{parentName:"a"},"mqemitter"))),(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("inlineCode",{parentName:"li"},"subscribe(topics)")," \u2014 a 
method to create a node ",(0,a.kt)("a",{parentName:"li",href:"https://nodejs.org/api/stream.html#new-streamreadableoptions"},(0,a.kt)("inlineCode",{parentName:"a"},"Readable")),"\nthat will contain the events emitted by those topics.")),(0,a.kt)("p",null,"Each entities of ",(0,a.kt)("inlineCode",{parentName:"p"},"app.platformatic.entities")," will be augmented with two functions:"),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("inlineCode",{parentName:"li"},"entity.getPublishTopic({ ctx, data, action })")," "),(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("inlineCode",{parentName:"li"},"entity.getSubscriptionTopic({ ctx, action })"))),(0,a.kt)("p",null,"Where ",(0,a.kt)("inlineCode",{parentName:"p"},"ctx")," is the GraphQL Context, ",(0,a.kt)("inlineCode",{parentName:"p"},"data")," is the object that will be emitted and ",(0,a.kt)("inlineCode",{parentName:"p"},"action")," is either ",(0,a.kt)("inlineCode",{parentName:"p"},"save")," or ",(0,a.kt)("inlineCode",{parentName:"p"},"delete"),"."),(0,a.kt)("h4",{id:"usage"},"Usage"),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-js"},"'use strict'\n\nconst Fastify = require('fastify')\nconst mapper = require('@platformatic/sql-mapper')\nconst events = require('@platformatic/sql-events')\n\nasync function main() {\n const app = Fastify({\n logger: {\n level: 'info'\n }\n })\n app.register(mapper.plugin, {\n connectionString: 'postgres://postgres:postgres@127.0.0.1/postgres'\n })\n\n app.register(events)\n\n // setup your routes\n\n\n await app.listen({ port: 3333 })\n}\n\nmain()\n")))}f.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/012331f6.33f64c55.js b/assets/js/012331f6.33f64c55.js new file mode 100644 index 00000000000..2e648bbd55b --- /dev/null +++ b/assets/js/012331f6.33f64c55.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkplatformatic_oss_website=self.webpackChunkplatformatic_oss_website||[]).push([[57842],{3905:(e,t,n)=>{n.d(t,{Zo:()=>s,kt:()=>d});var 
r=n(67294);function o(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function i(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);t&&(r=r.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,r)}return n}function a(e){for(var t=1;t=0||(o[n]=e[n]);return o}(e,t);if(Object.getOwnPropertySymbols){var i=Object.getOwnPropertySymbols(e);for(r=0;r=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(o[n]=e[n])}return o}var c=r.createContext({}),p=function(e){var t=r.useContext(c),n=t;return e&&(n="function"==typeof e?e(t):a(a({},t),e)),n},s=function(e){var t=p(e.components);return r.createElement(c.Provider,{value:t},e.children)},u="mdxType",m={inlineCode:"code",wrapper:function(e){var t=e.children;return r.createElement(r.Fragment,{},t)}},f=r.forwardRef((function(e,t){var n=e.components,o=e.mdxType,i=e.originalType,c=e.parentName,s=l(e,["components","mdxType","originalType","parentName"]),u=p(n),f=o,d=u["".concat(c,".").concat(f)]||u[f]||m[f]||i;return n?r.createElement(d,a(a({ref:t},s),{},{components:n})):r.createElement(d,a({ref:t},s))}));function d(e,t){var n=arguments,o=t&&t.mdxType;if("string"==typeof e||o){var i=n.length,a=new Array(i);a[0]=f;var l={};for(var c in t)hasOwnProperty.call(t,c)&&(l[c]=t[c]);l.originalType=e,l[u]="string"==typeof e?e:o,a[1]=l;for(var p=2;p{n.r(t),n.d(t,{assets:()=>c,contentTitle:()=>a,default:()=>m,frontMatter:()=>i,metadata:()=>l,toc:()=>p});var r=n(87462),o=(n(67294),n(3905));const i={},a="Programmatic API",l={unversionedId:"reference/client/programmatic",id:"version-0.41.1/reference/client/programmatic",title:"Programmatic API",description:"It is possible to use the Platformatic client without the 
generator.",source:"@site/versioned_docs/version-0.41.1/reference/client/programmatic.md",sourceDirName:"reference/client",slug:"/reference/client/programmatic",permalink:"/docs/0.41.1/reference/client/programmatic",draft:!1,editUrl:"https://github.com/platformatic/oss/edit/main/versioned_docs/version-0.41.1/reference/client/programmatic.md",tags:[],version:"0.41.1",frontMatter:{},sidebar:"docs",previous:{title:"Platformatic Client",permalink:"/docs/0.41.1/reference/client/introduction"},next:{title:"Frontend client",permalink:"/docs/0.41.1/reference/client/frontend"}},c={},p=[{value:"OpenAPI Client",id:"openapi-client",level:2},{value:"GraphQL Client",id:"graphql-client",level:2}],s={toc:p},u="wrapper";function m(e){let{components:t,...n}=e;return(0,o.kt)(u,(0,r.Z)({},s,n,{components:t,mdxType:"MDXLayout"}),(0,o.kt)("h1",{id:"programmatic-api"},"Programmatic API"),(0,o.kt)("p",null,"It is possible to use the Platformatic client without the generator."),(0,o.kt)("h2",{id:"openapi-client"},"OpenAPI Client"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-js"},"import { buildOpenAPIClient } from '@platformatic/client'\n\nconst client = await buildOpenAPIClient({\n url: `https://yourapi.com/documentation/json`, \n // path: 'path/to/openapi.json',\n headers: {\n 'foo': 'bar'\n }\n})\n\nconst res = await client.yourOperationName({ foo: 'bar' })\n\nconsole.log(res)\n")),(0,o.kt)("p",null,"If you use Typescript you can take advantage of the generated types file "),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-ts"},"import { buildOpenAPIClient } from '@platformatic/client'\nimport Client from './client'\n//\n// interface Client {\n// getMovies(req: GetMoviesRequest): Promise>;\n// createMovie(req: CreateMovieRequest): Promise;\n// ...\n// }\n//\n\nconst client: Client = await buildOpenAPIClient({\n url: `https://yourapi.com/documentation/json`, \n // path: 'path/to/openapi.json',\n headers: {\n 'foo': 'bar'\n 
}\n})\n\nconst res = await client.getMovies()\nconsole.log(res)\n")),(0,o.kt)("h2",{id:"graphql-client"},"GraphQL Client"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-js"},"import { buildGraphQLClient } from '@platformatic/client'\n\nconst client = await buildGraphQLClient({\n url: `https://yourapi.com/graphql`,\n headers: {\n 'foo': 'bar'\n }\n})\n\nconst res = await client.graphql({\n query: `\n mutation createMovie($title: String!) {\n saveMovie(input: {title: $title}) {\n id\n title\n }\n }\n `,\n variables: {\n title: 'The Matrix'\n }\n})\n\nconsole.log(res)\n")))}m.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/01c1675a.a7d9edd2.js b/assets/js/01c1675a.a7d9edd2.js new file mode 100644 index 00000000000..b7023bf09f4 --- /dev/null +++ b/assets/js/01c1675a.a7d9edd2.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkplatformatic_oss_website=self.webpackChunkplatformatic_oss_website||[]).push([[17085],{3905:(e,r,t)=>{t.d(r,{Zo:()=>c,kt:()=>d});var n=t(67294);function o(e,r,t){return r in e?Object.defineProperty(e,r,{value:t,enumerable:!0,configurable:!0,writable:!0}):e[r]=t,e}function a(e,r){var t=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);r&&(n=n.filter((function(r){return Object.getOwnPropertyDescriptor(e,r).enumerable}))),t.push.apply(t,n)}return t}function i(e){for(var r=1;r=0||(o[t]=e[t]);return o}(e,r);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);for(n=0;n=0||Object.prototype.propertyIsEnumerable.call(e,t)&&(o[t]=e[t])}return o}var p=n.createContext({}),s=function(e){var r=n.useContext(p),t=r;return e&&(t="function"==typeof e?e(r):i(i({},r),e)),t},c=function(e){var r=s(e.components);return n.createElement(p.Provider,{value:r},e.children)},f="mdxType",u={inlineCode:"code",wrapper:function(e){var r=e.children;return n.createElement(n.Fragment,{},r)}},g=n.forwardRef((function(e,r){var 
t=e.components,o=e.mdxType,a=e.originalType,p=e.parentName,c=l(e,["components","mdxType","originalType","parentName"]),f=s(t),g=o,d=f["".concat(p,".").concat(g)]||f[g]||u[g]||a;return t?n.createElement(d,i(i({ref:r},c),{},{components:t})):n.createElement(d,i({ref:r},c))}));function d(e,r){var t=arguments,o=r&&r.mdxType;if("string"==typeof e||o){var a=t.length,i=new Array(a);i[0]=g;var l={};for(var p in r)hasOwnProperty.call(r,p)&&(l[p]=r[p]);l.originalType=e,l[f]="string"==typeof e?e:o,i[1]=l;for(var s=2;s{t.r(r),t.d(r,{assets:()=>p,contentTitle:()=>i,default:()=>u,frontMatter:()=>a,metadata:()=>l,toc:()=>s});var n=t(87462),o=(t(67294),t(3905));const a={},i="Ignoring types and fields",l={unversionedId:"reference/sql-graphql/ignore",id:"version-0.41.3/reference/sql-graphql/ignore",title:"Ignoring types and fields",description:"@platformatic/sql-graphql allows to selectively ignore types and fields.",source:"@site/versioned_docs/version-0.41.3/reference/sql-graphql/ignore.md",sourceDirName:"reference/sql-graphql",slug:"/reference/sql-graphql/ignore",permalink:"/docs/0.41.3/reference/sql-graphql/ignore",draft:!1,editUrl:"https://github.com/platformatic/oss/edit/main/versioned_docs/version-0.41.3/reference/sql-graphql/ignore.md",tags:[],version:"0.41.3",frontMatter:{},sidebar:"docs",previous:{title:"Many To Many Relationship",permalink:"/docs/0.41.3/reference/sql-graphql/many-to-many"},next:{title:"Introduction to @platformatic/sql-mapper",permalink:"/docs/0.41.3/reference/sql-mapper/introduction"}},p={},s=[],c={toc:s},f="wrapper";function u(e){let{components:r,...t}=e;return(0,o.kt)(f,(0,n.Z)({},c,t,{components:r,mdxType:"MDXLayout"}),(0,o.kt)("h1",{id:"ignoring-types-and-fields"},"Ignoring types and fields"),(0,o.kt)("p",null,(0,o.kt)("inlineCode",{parentName:"p"},"@platformatic/sql-graphql")," allows to selectively ignore types and fields."),(0,o.kt)("p",null,"To ignore 
types:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-javascript"},"app.register(require('@platformatic/sql-graphql'), {\n ignore: {\n categories: true\n }\n})\n")),(0,o.kt)("p",null,"To ignore individual fields:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-javascript"},"app.register(require('@platformatic/sql-graphql'), {\n ignore: {\n categories: {\n name: true\n }\n }\n})\n")))}u.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/01fa4eff.4174a5ad.js b/assets/js/01fa4eff.4174a5ad.js new file mode 100644 index 00000000000..939109865f6 --- /dev/null +++ b/assets/js/01fa4eff.4174a5ad.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkplatformatic_oss_website=self.webpackChunkplatformatic_oss_website||[]).push([[88105],{27653:e=>{e.exports=JSON.parse('{"title":"Guides","slug":"/category/guides","permalink":"/docs/0.42.0/category/guides","navigation":{"previous":{"title":"Architecture","permalink":"/docs/0.42.0/getting-started/architecture"},"next":{"title":"Deployment","permalink":"/docs/0.42.0/guides/deployment/"}}}')}}]); \ No newline at end of file diff --git a/assets/js/03a0a280.c6d48670.js b/assets/js/03a0a280.c6d48670.js new file mode 100644 index 00000000000..4f3284ff77d --- /dev/null +++ b/assets/js/03a0a280.c6d48670.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkplatformatic_oss_website=self.webpackChunkplatformatic_oss_website||[]).push([[37051],{3905:(e,t,n)=>{n.d(t,{Zo:()=>c,kt:()=>f});var i=n(67294);function r(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function a(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var i=Object.getOwnPropertySymbols(e);t&&(i=i.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,i)}return n}function o(e){for(var t=1;t=0||(r[n]=e[n]);return r}(e,t);if(Object.getOwnPropertySymbols){var 
a=Object.getOwnPropertySymbols(e);for(i=0;i=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(r[n]=e[n])}return r}var l=i.createContext({}),p=function(e){var t=i.useContext(l),n=t;return e&&(n="function"==typeof e?e(t):o(o({},t),e)),n},c=function(e){var t=p(e.components);return i.createElement(l.Provider,{value:t},e.children)},u="mdxType",m={inlineCode:"code",wrapper:function(e){var t=e.children;return i.createElement(i.Fragment,{},t)}},d=i.forwardRef((function(e,t){var n=e.components,r=e.mdxType,a=e.originalType,l=e.parentName,c=s(e,["components","mdxType","originalType","parentName"]),u=p(n),d=r,f=u["".concat(l,".").concat(d)]||u[d]||m[d]||a;return n?i.createElement(f,o(o({ref:t},c),{},{components:n})):i.createElement(f,o({ref:t},c))}));function f(e,t){var n=arguments,r=t&&t.mdxType;if("string"==typeof e||r){var a=n.length,o=new Array(a);o[0]=d;var s={};for(var l in t)hasOwnProperty.call(t,l)&&(s[l]=t[l]);s.originalType=e,s[u]="string"==typeof e?e:r,o[1]=s;for(var p=2;p{n.r(t),n.d(t,{assets:()=>l,contentTitle:()=>o,default:()=>m,frontMatter:()=>a,metadata:()=>s,toc:()=>p});var i=n(87462),r=(n(67294),n(3905));const a={},o="Introduction to the sql-events module",s={unversionedId:"reference/sql-events/introduction",id:"version-0.41.2/reference/sql-events/introduction",title:"Introduction to the sql-events module",description:"The Platformatic DB sql-events uses mqemitter to publish events when entities are saved and deleted.",source:"@site/versioned_docs/version-0.41.2/reference/sql-events/introduction.md",sourceDirName:"reference/sql-events",slug:"/reference/sql-events/introduction",permalink:"/docs/0.41.2/reference/sql-events/introduction",draft:!1,editUrl:"https://github.com/platformatic/oss/edit/main/versioned_docs/version-0.41.2/reference/sql-events/introduction.md",tags:[],version:"0.41.2",frontMatter:{},sidebar:"docs",previous:{title:"Transactions",permalink:"/docs/0.41.2/reference/sql-mapper/entities/transactions"},next:{title:"Fastify 
Plugin",permalink:"/docs/0.41.2/reference/sql-events/fastify-plugin"}},l={},p=[{value:"Install",id:"install",level:2},{value:"Usage",id:"usage",level:2},{value:"API",id:"api",level:3}],c={toc:p},u="wrapper";function m(e){let{components:t,...n}=e;return(0,r.kt)(u,(0,i.Z)({},c,n,{components:t,mdxType:"MDXLayout"}),(0,r.kt)("h1",{id:"introduction-to-the-sql-events-module"},"Introduction to the sql-events module"),(0,r.kt)("p",null,"The Platformatic DB sql-events uses ",(0,r.kt)("a",{parentName:"p",href:"http://npm.im/mqemitter"},"mqemitter")," to publish events when ",(0,r.kt)("a",{parentName:"p",href:"/docs/0.41.2/reference/sql-mapper/entities/introduction"},"entities")," are saved and deleted."),(0,r.kt)("p",null,"These events are useful to distribute updates to clients, e.g. via WebSocket, Server-Sent Events, or GraphQL Subscritions.\nWhen subscribing and using a multi-process system with a broker like Redis, a subscribed topic will receive the data from all\nthe other processes."),(0,r.kt)("p",null,"They are not the right choice for executing some code whenever an entity is created, modified or deleted, in that case\nuse ",(0,r.kt)("a",{parentName:"p",href:"/docs/0.41.2/reference/sql-mapper/entities/hooks"},"@platformatic/sql-mapper hooks"),"."),(0,r.kt)("h2",{id:"install"},"Install"),(0,r.kt)("p",null,"You can use together with ",(0,r.kt)("inlineCode",{parentName:"p"},"@platformatic/sql-mapper"),"."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre"},"npm i @platformatic/sql-mapper @platformatic/sql-events\n")),(0,r.kt)("h2",{id:"usage"},"Usage"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-javascript"},"const { connect } = require('@platformatic/sql-mapper')\nconst { setupEmitter } = require('@platformatic/sql-events')\nconst { pino } = require('pino')\n\nconst log = pino()\n\nasync function onDatabaseLoad (db, sql) {\n await db.query(sql`CREATE TABLE pages (\n id SERIAL PRIMARY KEY,\n title VARCHAR(255) NOT NULL\n );`)\n}\nconst 
connectionString = 'postgres://postgres:postgres@localhost:5432/postgres'\nconst mapper = await connect({\n connectionString,\n log,\n onDatabaseLoad,\n ignore: {},\n hooks: {\n Page: {\n find: async function(_find, opts) {\n console.log('hook called');\n return await _find(opts)\n }\n }\n }\n})\n\nsetupEmitter({ mapper, log })\n\nconst pageEntity = mapper.entities.page\n\nconst queue = await mapper.subscribe([\n pageEntity.getSubscriptionTopic({ action: 'save' }),\n pageEntity.getSubscriptionTopic({ action: 'delete' })\n])\n\nconst page = await pageEntity.save({\n input: { title: 'fourth page' }\n})\n\nconst page2 = await pageEntity.save({\n input: {\n id: page.id,\n title: 'fifth page'\n }\n})\n\nawait pageEntity.delete({\n where: {\n id: {\n eq: page.id\n }\n },\n fields: ['id', 'title']\n})\n\nfor await (const ev of queue) {\n console.log(ev)\n if (expected.length === 0) {\n break\n }\n}\n\nprocess.exit(0)\n")),(0,r.kt)("h3",{id:"api"},"API"),(0,r.kt)("p",null,"The ",(0,r.kt)("inlineCode",{parentName:"p"},"setupEmitter")," function has the following options:"),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("inlineCode",{parentName:"li"},"mq")," - an instance of ",(0,r.kt)("a",{parentName:"li",href:"https://npm.im/mqemitter"},(0,r.kt)("inlineCode",{parentName:"a"},"mqemitter")),", optional.")),(0,r.kt)("p",null,"The ",(0,r.kt)("inlineCode",{parentName:"p"},"setupEmitter")," functions adds the following properties to the ",(0,r.kt)("inlineCode",{parentName:"p"},"mapper")," object:"),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("inlineCode",{parentName:"li"},"mq")," \u2014 an instance of ",(0,r.kt)("a",{parentName:"li",href:"https://npm.im/mqemitter"},(0,r.kt)("inlineCode",{parentName:"a"},"mqemitter"))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("inlineCode",{parentName:"li"},"subscribe(topics)")," \u2014 a method to create a node 
",(0,r.kt)("a",{parentName:"li",href:"https://nodejs.org/api/stream.html#new-streamreadableoptions"},(0,r.kt)("inlineCode",{parentName:"a"},"Readable")),"\nthat will contain the events emitted by those topics.")),(0,r.kt)("p",null,"Each entities of ",(0,r.kt)("inlineCode",{parentName:"p"},"app.platformatic.entities")," will be augmented with two functions:"),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("inlineCode",{parentName:"li"},"entity.getPublishTopic({ ctx, data, action })")," "),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("inlineCode",{parentName:"li"},"entity.getSubscriptionTopic({ ctx, action })"))),(0,r.kt)("p",null,"Where ",(0,r.kt)("inlineCode",{parentName:"p"},"ctx")," is the GraphQL Context, ",(0,r.kt)("inlineCode",{parentName:"p"},"data")," is the object that will be emitted and ",(0,r.kt)("inlineCode",{parentName:"p"},"action")," is either ",(0,r.kt)("inlineCode",{parentName:"p"},"save")," or ",(0,r.kt)("inlineCode",{parentName:"p"},"delete"),"."))}m.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/0644d42d.04ebfe4f.js b/assets/js/0644d42d.04ebfe4f.js new file mode 100644 index 00000000000..da54a1fce8a --- /dev/null +++ b/assets/js/0644d42d.04ebfe4f.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkplatformatic_oss_website=self.webpackChunkplatformatic_oss_website||[]).push([[88587],{3905:(e,t,n)=>{n.d(t,{Zo:()=>p,kt:()=>m});var r=n(67294);function i(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function o(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);t&&(r=r.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,r)}return n}function a(e){for(var t=1;t=0||(i[n]=e[n]);return i}(e,t);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);for(r=0;r=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(i[n]=e[n])}return i}var 
s=r.createContext({}),c=function(e){var t=r.useContext(s),n=t;return e&&(n="function"==typeof e?e(t):a(a({},t),e)),n},p=function(e){var t=c(e.components);return r.createElement(s.Provider,{value:t},e.children)},u="mdxType",d={inlineCode:"code",wrapper:function(e){var t=e.children;return r.createElement(r.Fragment,{},t)}},f=r.forwardRef((function(e,t){var n=e.components,i=e.mdxType,o=e.originalType,s=e.parentName,p=l(e,["components","mdxType","originalType","parentName"]),u=c(n),f=i,m=u["".concat(s,".").concat(f)]||u[f]||d[f]||o;return n?r.createElement(m,a(a({ref:t},p),{},{components:n})):r.createElement(m,a({ref:t},p))}));function m(e,t){var n=arguments,i=t&&t.mdxType;if("string"==typeof e||i){var o=n.length,a=new Array(o);a[0]=f;var l={};for(var s in t)hasOwnProperty.call(t,s)&&(l[s]=t[s]);l.originalType=e,l[u]="string"==typeof e?e:i,a[1]=l;for(var c=2;c{n.r(t),n.d(t,{assets:()=>s,contentTitle:()=>a,default:()=>d,frontMatter:()=>o,metadata:()=>l,toc:()=>c});var r=n(87462),i=(n(67294),n(3905));const o={},a="Subscription",l={unversionedId:"reference/sql-graphql/subscriptions",id:"reference/sql-graphql/subscriptions",title:"Subscription",description:"When the GraphQL plugin is loaded, some subscriptions are automatically adding to",source:"@site/docs/reference/sql-graphql/subscriptions.md",sourceDirName:"reference/sql-graphql",slug:"/reference/sql-graphql/subscriptions",permalink:"/docs/next/reference/sql-graphql/subscriptions",draft:!1,editUrl:"https://github.com/platformatic/platformatic/edit/main/docs/reference/sql-graphql/subscriptions.md",tags:[],version:"current",frontMatter:{}},s={},c=[{value:"[ENTITY]Saved",id:"entitysaved",level:2},{value:"[ENTITY]Deleted",id:"entitydeleted",level:2}],p={toc:c},u="wrapper";function d(e){let{components:t,...n}=e;return(0,i.kt)(u,(0,r.Z)({},p,n,{components:t,mdxType:"MDXLayout"}),(0,i.kt)("h1",{id:"subscription"},"Subscription"),(0,i.kt)("p",null,"When the GraphQL plugin is loaded, some subscriptions are automatically adding 
to\nthe GraphQL schema if the ",(0,i.kt)("inlineCode",{parentName:"p"},"@platformatic/sql-events")," plugin has been previously registered."),(0,i.kt)("p",null,"It's possible to avoid creating the subscriptions for a given entity by adding the ",(0,i.kt)("inlineCode",{parentName:"p"},"subscriptionIgnore")," config,\nlike so: ",(0,i.kt)("inlineCode",{parentName:"p"},"subscriptionIgnore: ['page']"),"."),(0,i.kt)("h2",{id:"entitysaved"},(0,i.kt)("inlineCode",{parentName:"h2"},"[ENTITY]Saved")),(0,i.kt)("p",null,"Published whenever an entity is saved, e.g. when the mutation ",(0,i.kt)("inlineCode",{parentName:"p"},"insert[ENTITY]")," or ",(0,i.kt)("inlineCode",{parentName:"p"},"save[ENTITY]")," are called."),(0,i.kt)("h2",{id:"entitydeleted"},(0,i.kt)("inlineCode",{parentName:"h2"},"[ENTITY]Deleted")),(0,i.kt)("p",null,"Published whenever an entity is deleted, e.g. when the mutation ",(0,i.kt)("inlineCode",{parentName:"p"},"delete[ENTITY]")," is called.."))}d.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/071c8372.695446df.js b/assets/js/071c8372.695446df.js new file mode 100644 index 00000000000..2053337f5d3 --- /dev/null +++ b/assets/js/071c8372.695446df.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkplatformatic_oss_website=self.webpackChunkplatformatic_oss_website||[]).push([[97306],{3905:(t,e,a)=>{a.d(e,{Zo:()=>u,kt:()=>f});var n=a(67294);function r(t,e,a){return e in t?Object.defineProperty(t,e,{value:a,enumerable:!0,configurable:!0,writable:!0}):t[e]=a,t}function l(t,e){var a=Object.keys(t);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(t);e&&(n=n.filter((function(e){return Object.getOwnPropertyDescriptor(t,e).enumerable}))),a.push.apply(a,n)}return a}function i(t){for(var e=1;e=0||(r[a]=t[a]);return r}(t,e);if(Object.getOwnPropertySymbols){var l=Object.getOwnPropertySymbols(t);for(n=0;n=0||Object.prototype.propertyIsEnumerable.call(t,a)&&(r[a]=t[a])}return r}var p=n.createContext({}),c=function(t){var 
e=n.useContext(p),a=e;return t&&(a="function"==typeof t?t(e):i(i({},e),t)),a},u=function(t){var e=c(t.components);return n.createElement(p.Provider,{value:e},t.children)},s="mdxType",m={inlineCode:"code",wrapper:function(t){var e=t.children;return n.createElement(n.Fragment,{},e)}},d=n.forwardRef((function(t,e){var a=t.components,r=t.mdxType,l=t.originalType,p=t.parentName,u=o(t,["components","mdxType","originalType","parentName"]),s=c(a),d=r,f=s["".concat(p,".").concat(d)]||s[d]||m[d]||l;return a?n.createElement(f,i(i({ref:e},u),{},{components:a})):n.createElement(f,i({ref:e},u))}));function f(t,e){var a=arguments,r=e&&e.mdxType;if("string"==typeof t||r){var l=a.length,i=new Array(l);i[0]=d;var o={};for(var p in e)hasOwnProperty.call(e,p)&&(o[p]=e[p]);o.originalType=t,o[s]="string"==typeof t?t:r,i[1]=o;for(var c=2;c{a.r(e),a.d(e,{assets:()=>p,contentTitle:()=>i,default:()=>m,frontMatter:()=>l,metadata:()=>o,toc:()=>c});var n=a(87462),r=(a(67294),a(3905));const l={},i="Platformatic Cloud Pricing",o={unversionedId:"platformatic-cloud/pricing",id:"platformatic-cloud/pricing",title:"Platformatic Cloud Pricing",description:"Find the plan that works best for you!",source:"@site/docs/platformatic-cloud/pricing.md",sourceDirName:"platformatic-cloud",slug:"/platformatic-cloud/pricing",permalink:"/docs/next/platformatic-cloud/pricing",draft:!1,editUrl:"https://github.com/platformatic/platformatic/edit/main/docs/platformatic-cloud/pricing.md",tags:[],version:"current",frontMatter:{}},p={},c=[{value:"FAQ",id:"faq",level:2},{value:"What is a slot?",id:"what-is-a-slot",level:3},{value:"What is a workspace?",id:"what-is-a-workspace",level:3},{value:"Can I change or upgrade my plan after I start using Platformatic?",id:"can-i-change-or-upgrade-my-plan-after-i-start-using-platformatic",level:3},{value:"What does it mean I can set my own CNAME?",id:"what-does-it-mean-i-can-set-my-own-cname",level:3}],u={toc:c},s="wrapper";function 
m(t){let{components:e,...a}=t;return(0,r.kt)(s,(0,n.Z)({},u,a,{components:e,mdxType:"MDXLayout"}),(0,r.kt)("h1",{id:"platformatic-cloud-pricing"},"Platformatic Cloud Pricing"),(0,r.kt)("p",null,"Find the plan that works best for you!"),(0,r.kt)("table",null,(0,r.kt)("thead",{parentName:"table"},(0,r.kt)("tr",{parentName:"thead"},(0,r.kt)("th",{parentName:"tr",align:null}),(0,r.kt)("th",{parentName:"tr",align:null},"Free"),(0,r.kt)("th",{parentName:"tr",align:null},"Basic"),(0,r.kt)("th",{parentName:"tr",align:null},"Advanced"),(0,r.kt)("th",{parentName:"tr",align:null},"Pro"))),(0,r.kt)("tbody",{parentName:"table"},(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},"Pricing"),(0,r.kt)("td",{parentName:"tr",align:null},"$0"),(0,r.kt)("td",{parentName:"tr",align:null},"$4.99"),(0,r.kt)("td",{parentName:"tr",align:null},"$22.45"),(0,r.kt)("td",{parentName:"tr",align:null},"$49.99")),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},"Slots"),(0,r.kt)("td",{parentName:"tr",align:null},"0"),(0,r.kt)("td",{parentName:"tr",align:null},"1"),(0,r.kt)("td",{parentName:"tr",align:null},"5"),(0,r.kt)("td",{parentName:"tr",align:null},"12")),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},"CNAME"),(0,r.kt)("td",{parentName:"tr",align:null},"-"),(0,r.kt)("td",{parentName:"tr",align:null},"true"),(0,r.kt)("td",{parentName:"tr",align:null},"true"),(0,r.kt)("td",{parentName:"tr",align:null},"true")),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},"Always On"),(0,r.kt)("td",{parentName:"tr",align:null},"-"),(0,r.kt)("td",{parentName:"tr",align:null},"true"),(0,r.kt)("td",{parentName:"tr",align:null},"true"),(0,r.kt)("td",{parentName:"tr",align:null},"true")))),(0,r.kt)("h2",{id:"faq"},"FAQ"),(0,r.kt)("h3",{id:"what-is-a-slot"},"What is a slot?"),(0,r.kt)("p",null,"One slot is equal to one compute unit. 
The free plan has no always-on\nmachines and they will be stopped while not in use."),(0,r.kt)("h3",{id:"what-is-a-workspace"},"What is a workspace?"),(0,r.kt)("p",null,"A workspace is the security boundary of your deployment. You will use\nthe same credentials to deploy to one."),(0,r.kt)("p",null,"A workspace can be either static or dynamic.\nA static workspace always deploy to the same domain, while\nin a dynamic workspace each deployment will have its own domain.\nThe latter are useful to provde for pull request previews."),(0,r.kt)("h3",{id:"can-i-change-or-upgrade-my-plan-after-i-start-using-platformatic"},"Can I change or upgrade my plan after I start using Platformatic?"),(0,r.kt)("p",null,"Plans can be changed or upgraded at any time"),(0,r.kt)("h3",{id:"what-does-it-mean-i-can-set-my-own-cname"},"What does it mean I can set my own CNAME?"),(0,r.kt)("p",null,"Free applications only gets a ",(0,r.kt)("inlineCode",{parentName:"p"},"*.deploy.space")," domain name to access\ntheir application. 
All other plans can set it to a domain of their chosing."))}m.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/078f9179.0d04ba9c.js b/assets/js/078f9179.0d04ba9c.js new file mode 100644 index 00000000000..b3d5c3fe024 --- /dev/null +++ b/assets/js/078f9179.0d04ba9c.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkplatformatic_oss_website=self.webpackChunkplatformatic_oss_website||[]).push([[24931],{3905:(e,n,i)=>{i.d(n,{Zo:()=>m,kt:()=>g});var l=i(67294);function t(e,n,i){return n in e?Object.defineProperty(e,n,{value:i,enumerable:!0,configurable:!0,writable:!0}):e[n]=i,e}function I(e,n){var i=Object.keys(e);if(Object.getOwnPropertySymbols){var l=Object.getOwnPropertySymbols(e);n&&(l=l.filter((function(n){return Object.getOwnPropertyDescriptor(e,n).enumerable}))),i.push.apply(i,l)}return i}function d(e){for(var n=1;n=0||(t[i]=e[i]);return t}(e,n);if(Object.getOwnPropertySymbols){var I=Object.getOwnPropertySymbols(e);for(l=0;l=0||Object.prototype.propertyIsEnumerable.call(e,i)&&(t[i]=e[i])}return t}var c=l.createContext({}),o=function(e){var n=l.useContext(c),i=n;return e&&(i="function"==typeof e?e(n):d(d({},n),e)),i},m=function(e){var n=o(e.components);return l.createElement(c.Provider,{value:n},e.children)},Z="mdxType",s={inlineCode:"code",wrapper:function(e){var n=e.children;return l.createElement(l.Fragment,{},n)}},p=l.forwardRef((function(e,n){var i=e.components,t=e.mdxType,I=e.originalType,c=e.parentName,m=a(e,["components","mdxType","originalType","parentName"]),Z=o(i),p=t,g=Z["".concat(c,".").concat(p)]||Z[p]||s[p]||I;return i?l.createElement(g,d(d({ref:n},m),{},{components:i})):l.createElement(g,d({ref:n},m))}));function g(e,n){var i=arguments,t=n&&n.mdxType;if("string"==typeof e||t){var I=i.length,d=new Array(I);d[0]=p;var a={};for(var c in n)hasOwnProperty.call(n,c)&&(a[c]=n[c]);a.originalType=e,a[Z]="string"==typeof e?e:t,d[1]=a;for(var 
o=2;o{i.r(n),i.d(n,{assets:()=>c,contentTitle:()=>d,default:()=>s,frontMatter:()=>I,metadata:()=>a,toc:()=>o});var l=i(87462),t=(i(67294),i(3905));const I={},d="Hooks",a={unversionedId:"reference/sql-mapper/entities/hooks",id:"version-0.42.1/reference/sql-mapper/entities/hooks",title:"Hooks",description:"Entity hooks are a way to wrap the API methods for an entity and add custom behaviour.",source:"@site/versioned_docs/version-0.42.1/reference/sql-mapper/entities/hooks.md",sourceDirName:"reference/sql-mapper/entities",slug:"/reference/sql-mapper/entities/hooks",permalink:"/docs/reference/sql-mapper/entities/hooks",draft:!1,editUrl:"https://github.com/platformatic/oss/edit/main/versioned_docs/version-0.42.1/reference/sql-mapper/entities/hooks.md",tags:[],version:"0.42.1",frontMatter:{},sidebar:"docs",previous:{title:"Example",permalink:"/docs/reference/sql-mapper/entities/example"},next:{title:"Relations",permalink:"/docs/reference/sql-mapper/entities/relations"}},c={},o=[{value:"How to use hooks",id:"how-to-use-hooks",level:2},{value:"Usage",id:"usage",level:3},{value:"Multiple Hooks",id:"multiple-hooks",level:2}],m={toc:o},Z="wrapper";function s(e){let{components:n,...I}=e;return(0,t.kt)(Z,(0,l.Z)({},m,I,{components:n,mdxType:"MDXLayout"}),(0,t.kt)("h1",{id:"hooks"},"Hooks"),(0,t.kt)("p",null,"Entity hooks are a way to wrap the ",(0,t.kt)("a",{parentName:"p",href:"./api"},"API methods")," for an entity and add custom behaviour."),(0,t.kt)("p",null,"The Platformatic DB SQL Mapper provides an ",(0,t.kt)("inlineCode",{parentName:"p"},"addEntityHooks(entityName, spec)")," function that can be used to add hooks for an entity."),(0,t.kt)("h2",{id:"how-to-use-hooks"},"How to use hooks"),(0,t.kt)("p",null,(0,t.kt)("inlineCode",{parentName:"p"},"addEntityHooks")," accepts two arguments:"),(0,t.kt)("ol",null,(0,t.kt)("li",{parentName:"ol"},"A string representing the entity name (singularized), for example 
",(0,t.kt)("inlineCode",{parentName:"li"},"'page'"),"."),(0,t.kt)("li",{parentName:"ol"},"A key/value object where the key is one of the API methods (",(0,t.kt)("inlineCode",{parentName:"li"},"find"),", ",(0,t.kt)("inlineCode",{parentName:"li"},"insert"),", ",(0,t.kt)("inlineCode",{parentName:"li"},"save"),", ",(0,t.kt)("inlineCode",{parentName:"li"},"delete"),") and the value is a callback function. The callback will be called with the ",(0,t.kt)("em",{parentName:"li"},"original")," API method and the options that were passed to that method. See the example below.")),(0,t.kt)("h3",{id:"usage"},"Usage"),(0,t.kt)("pre",null,(0,t.kt)("code",{parentName:"pre",className:"language-js"},'\'use strict\'\nconst { connect } = require(\'@platformatic/sql-mapper\')\nconst { pino } = require(\'pino\')\nconst pretty = require(\'pino-pretty\')\nconst logger = pino(pretty())\n\nasync function main() {\n const pgConnectionString = \'postgres://postgres:postgres@127.0.0.1/postgres\'\n const mapper = await connect({\n connectionString: pgConnectionString,\n log: logger,\n })\n mapper.addEntityHooks(\'page\', {\n find: async (originalFind, opts) => {\n // Add a `foo` field with `bar` value to each row\n const res = await originalFind(opts)\n return res.map((row) => {\n row.foo = \'bar\'\n return row\n })\n }\n })\n const res = await mapper.entities.page.find({\n fields: [\'id\', \'title\',],\n where: {\n id: {\n lt: 10\n }\n },\n })\n logger.info(res)\n /**\n [\n 0: {\n "id": "5",\n "title": "Page 1",\n "foo": "bar"\n },\n 1: {\n "id": "6",\n "title": "Page 2",\n "foo": "bar"\n }\n ]\n */\n await mapper.db.dispose()\n}\nmain()\n')),(0,t.kt)("h2",{id:"multiple-hooks"},"Multiple Hooks"),(0,t.kt)("p",null,"Multiple hooks can be added for the same entity and API method, for example:"),(0,t.kt)("pre",null,(0,t.kt)("code",{parentName:"pre",className:"language-js"},'\'use strict\'\nconst { connect } = require(\'@platformatic/sql-mapper\')\nconst { pino } = require(\'pino\')\nconst pretty = 
require(\'pino-pretty\')\nconst logger = pino(pretty())\n\nasync function main() {\n const pgConnectionString = \'postgres://postgres:postgres@127.0.0.1/postgres\'\n const mapper = await connect({\n connectionString: pgConnectionString,\n log: logger,\n })\n mapper.addEntityHooks(\'page\', {\n find: async function firstHook(previousFunction, opts) {\n // Add a `foo` field with `bar` value to each row\n const res = await previousFunction(opts)\n return res.map((row) => {\n row.foo = \'bar\'\n return row\n })\n }\n })\n mapper.addEntityHooks(\'page\', {\n find: async function secondHook(previousFunction, opts) {\n // Add a `bar` field with `baz` value to each row\n const res = await previousFunction(opts)\n return res.map((row) => {\n row.bar = \'baz\'\n return row\n })\n }\n })\n const res = await mapper.entities.page.find({\n fields: [\'id\', \'title\',],\n where: {\n id: {\n lt: 10\n }\n },\n })\n logger.info(res)\n /**\n [\n 0: {\n "id": "5",\n "title": "Page 1",\n "foo": "bar",\n "bar": "baz"\n },\n 1: {\n "id": "6",\n "title": "Page 2",\n "foo": "bar",\n "bar": "baz"\n }\n ]\n */\n await mapper.db.dispose()\n}\nmain()\n')),(0,t.kt)("p",null,"Since hooks are wrappers, they are being called in reverse order, like the image below"),(0,t.kt)("p",null,(0,t.kt)("img",{alt:"Hooks Lifecycle",src:i(2879).Z,width:"791",height:"91"})),(0,t.kt)("p",null,"So even though we defined two hooks, the Database will be hit only once."),(0,t.kt)("p",null,"Query result will be processed by ",(0,t.kt)("inlineCode",{parentName:"p"},"firstHook"),", which will pass the result to ",(0,t.kt)("inlineCode",{parentName:"p"},"secondHook"),", which will, finally, send the processed result to the original ",(0,t.kt)("inlineCode",{parentName:"p"},".find({...})")," function."))}s.isMDXComponent=!0},2879:(e,n,i)=>{i.d(n,{Z:()=>l});const l="data:image/svg+xml;base64,<?xml version="1.0" encoding="UTF-8"?>
<!-- Do not edit this file with editors other than diagrams.net -->
<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd">
<svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" version="1.1" width="791px" height="91px" viewBox="-0.5 -0.5 791 91" content="&lt;mxfile host=&quot;app.diagrams.net&quot; modified=&quot;2022-09-06T14:15:55.483Z&quot; agent=&quot;5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/105.0.0.0 Safari/537.36&quot; etag=&quot;qiUxBvqIGgsE9hRC6q7D&quot; version=&quot;20.2.8&quot; type=&quot;device&quot;&gt;&lt;diagram id=&quot;29r9i5743jBnMzNGYpPD&quot; name=&quot;Page-1&quot;&gt;5VdNc9owEP01zLSHMPgDDFcICYd+TTm0VwUvthLZ68prDPn1lZGMcZ0QZwqDZ3JCetq1dvfprVDPmUXbe8mS8Cv6IHr2wN/2nNuebVvWeKR+CmSnEc9yNRBI7hujCljyZzDgwKAZ9yGtGRKiIJ7UwRXGMayohjEpMa+brVHUd01YAA1guWKiif7iPoUaHdtehS+AB2G5szWa6JWIlcYmkzRkPuZHkDPvOTOJSHoUbWcgiuKVddF+d6+sHgKTEFMbh1wu15mXbL47v58e7/nz9sfP6Maws2EiMwmbYGlXVgB8VRAzRUkhBhgzMa/QqcQs9qHYZqBmlc0XxESBlgIfgWhn2GUZoYJCioRZXWNMdyziojgkM8wkB6mC+AaqXFMdUBHFq4kbKFWeKziRrUmOmAyATti5B3rUuQaMgORO+UkQjPimHgczByw42FUcqIGh4R2U2C9QMhJkylScYWaSHP3JisMzrVfsAKtRsHfjMqUF4tOnz+WXVGD6Y9qiwXnFaEFPHnKCZaK3zZXO6+yZeEESbE9T1CypcXAdIxLTJezhUM/zSnNWKaTwSG+jwYVYGH4oYTgthWFfUxjOuYWRgro1/G4rwx53TRnj7iljAWIDxFfsvKpwW6rCu6Yq3HOrQpWJ066/5rHfXVkMOycL7908VKe2wcEtI/bAUmhbfvXfMimGvvJLCSW8TUKLq+W/GPL+vdLdQYOh8QsETS5FUPmi6FLjuuCVPmnZvJxrNq/JuZtXxJIEZH/fwzik/eJ91vFWZr/dyewzdTI1rd6b+7WjV7sz/ws=&lt;/diagram&gt;&lt;/mxfile&gt;" style="background-color: rgb(255, 255, 255);"><defs/><g><path d="M 530 45 L 550 45 L 540 45 L 553.63 45" fill="none" stroke="rgb(0, 0, 0)" stroke-miterlimit="10" pointer-events="stroke"/><path d="M 558.88 45 L 551.88 48.5 L 553.63 45 L 551.88 41.5 Z" fill="rgb(0, 0, 0)" stroke="rgb(0, 0, 0)" stroke-miterlimit="10" pointer-events="all"/><rect x="410" y="15" width="120" height="60" rx="9" ry="9" fill="rgb(255, 255, 255)" stroke="rgb(0, 0, 0)" pointer-events="all"/><g transform="translate(-0.5 -0.5)"><switch><foreignObject pointer-events="none" width="100%" height="100%" 
requiredFeatures="http://www.w3.org/TR/SVG11/feature#Extensibility" style="overflow: visible; text-align: left;"><div xmlns="http://www.w3.org/1999/xhtml" style="display: flex; align-items: unsafe center; justify-content: unsafe center; width: 118px; height: 1px; padding-top: 45px; margin-left: 411px;"><div data-drawio-colors="color: rgb(0, 0, 0); " style="box-sizing: border-box; font-size: 0px; text-align: center;"><div style="display: inline-block; font-size: 12px; font-family: Helvetica; color: rgb(0, 0, 0); line-height: 1.2; pointer-events: all; white-space: normal; overflow-wrap: normal;"><font face="Courier New">firstHook()</font></div></div></div></foreignObject><text x="470" y="49" fill="rgb(0, 0, 0)" font-family="Helvetica" font-size="12px" text-anchor="middle">firstHook()</text></switch></g><path d="M 380 45 L 400 45 L 390 45 L 403.63 45" fill="none" stroke="rgb(0, 0, 0)" stroke-miterlimit="10" pointer-events="stroke"/><path d="M 408.88 45 L 401.88 48.5 L 403.63 45 L 401.88 41.5 Z" fill="rgb(0, 0, 0)" stroke="rgb(0, 0, 0)" stroke-miterlimit="10" pointer-events="all"/><rect x="260" y="15" width="120" height="60" rx="9" ry="9" fill="rgb(255, 255, 255)" stroke="rgb(0, 0, 0)" pointer-events="all"/><g transform="translate(-0.5 -0.5)"><switch><foreignObject pointer-events="none" width="100%" height="100%" requiredFeatures="http://www.w3.org/TR/SVG11/feature#Extensibility" style="overflow: visible; text-align: left;"><div xmlns="http://www.w3.org/1999/xhtml" style="display: flex; align-items: unsafe center; justify-content: unsafe center; width: 118px; height: 1px; padding-top: 45px; margin-left: 261px;"><div data-drawio-colors="color: rgb(0, 0, 0); " style="box-sizing: border-box; font-size: 0px; text-align: center;"><div style="display: inline-block; font-size: 12px; font-family: Helvetica; color: rgb(0, 0, 0); line-height: 1.2; pointer-events: all; white-space: normal; overflow-wrap: normal;"><font face="Courier 
New">secondHook()</font></div></div></div></foreignObject><text x="320" y="49" fill="rgb(0, 0, 0)" font-family="Helvetica" font-size="12px" text-anchor="middle">secondHook()</text></switch></g><path d="M 680 45 L 700 45 L 690 45 L 703.63 45" fill="none" stroke="rgb(0, 0, 0)" stroke-miterlimit="10" pointer-events="stroke"/><path d="M 708.88 45 L 701.88 48.5 L 703.63 45 L 701.88 41.5 Z" fill="rgb(0, 0, 0)" stroke="rgb(0, 0, 0)" stroke-miterlimit="10" pointer-events="all"/><rect x="560" y="15" width="120" height="60" rx="9" ry="9" fill="rgb(255, 255, 255)" stroke="rgb(0, 0, 0)" pointer-events="all"/><g transform="translate(-0.5 -0.5)"><switch><foreignObject pointer-events="none" width="100%" height="100%" requiredFeatures="http://www.w3.org/TR/SVG11/feature#Extensibility" style="overflow: visible; text-align: left;"><div xmlns="http://www.w3.org/1999/xhtml" style="display: flex; align-items: unsafe center; justify-content: unsafe center; width: 118px; height: 1px; padding-top: 45px; margin-left: 561px;"><div data-drawio-colors="color: rgb(0, 0, 0); " style="box-sizing: border-box; font-size: 0px; text-align: center;"><div style="display: inline-block; font-size: 12px; font-family: Helvetica; color: rgb(0, 0, 0); line-height: 1.2; pointer-events: all; white-space: normal; overflow-wrap: normal;"><font face="Courier New">entity.find()</font></div></div></div></foreignObject><text x="620" y="49" fill="rgb(0, 0, 0)" font-family="Helvetica" font-size="12px" text-anchor="middle">entity.find()</text></switch></g><path d="M 710 11 C 710 -3.67 790 -3.67 790 11 L 790 79 C 790 93.67 710 93.67 710 79 Z" fill="rgb(255, 255, 255)" stroke="rgb(0, 0, 0)" stroke-miterlimit="10" pointer-events="all"/><path d="M 710 11 C 710 22 790 22 790 11 M 710 16.5 C 710 27.5 790 27.5 790 16.5 M 710 22 C 710 33 790 33 790 22" fill="none" stroke="rgb(0, 0, 0)" stroke-miterlimit="10" pointer-events="all"/><g transform="translate(-0.5 -0.5)"><switch><foreignObject pointer-events="none" width="100%" 
height="100%" requiredFeatures="http://www.w3.org/TR/SVG11/feature#Extensibility" style="overflow: visible; text-align: left;"><div xmlns="http://www.w3.org/1999/xhtml" style="display: flex; align-items: unsafe center; justify-content: unsafe center; width: 78px; height: 1px; padding-top: 59px; margin-left: 711px;"><div data-drawio-colors="color: rgb(0, 0, 0); " style="box-sizing: border-box; font-size: 0px; text-align: center;"><div style="display: inline-block; font-size: 12px; font-family: &quot;Courier New&quot;; color: rgb(0, 0, 0); line-height: 1.2; pointer-events: all; white-space: normal; overflow-wrap: normal;"><font face="Helvetica">Database</font></div></div></div></foreignObject><text x="750" y="63" fill="rgb(0, 0, 0)" font-family="Courier New" font-size="12px" text-anchor="middle">Database</text></switch></g><path d="M 220 45 L 253.63 45" fill="none" stroke="rgb(0, 0, 0)" stroke-miterlimit="10" pointer-events="stroke"/><path d="M 258.88 45 L 251.88 48.5 L 253.63 45 L 251.88 41.5 Z" fill="rgb(0, 0, 0)" stroke="rgb(0, 0, 0)" stroke-miterlimit="10" pointer-events="all"/><rect x="0" y="15" width="220" height="60" rx="9" ry="9" fill="rgb(255, 255, 255)" stroke="rgb(0, 0, 0)" pointer-events="all"/><g transform="translate(-0.5 -0.5)"><switch><foreignObject pointer-events="none" width="100%" height="100%" requiredFeatures="http://www.w3.org/TR/SVG11/feature#Extensibility" style="overflow: visible; text-align: left;"><div xmlns="http://www.w3.org/1999/xhtml" style="display: flex; align-items: unsafe center; justify-content: unsafe center; width: 218px; height: 1px; padding-top: 45px; margin-left: 1px;"><div data-drawio-colors="color: rgb(0, 0, 0); " style="box-sizing: border-box; font-size: 0px; text-align: center;"><div style="display: inline-block; font-size: 12px; font-family: Helvetica; color: rgb(0, 0, 0); line-height: 1.2; pointer-events: all; white-space: normal; overflow-wrap: normal;"><font face="Courier 
New">mapper.entities.page.find()</font></div></div></div></foreignObject><text x="110" y="49" fill="rgb(0, 0, 0)" font-family="Helvetica" font-size="12px" text-anchor="middle">mapper.entities.page.find()</text></switch></g></g><switch><g requiredFeatures="http://www.w3.org/TR/SVG11/feature#Extensibility"/><a transform="translate(0,-5)" xlink:href="https://www.diagrams.net/doc/faq/svg-export-text-problems" target="_blank"><text text-anchor="middle" font-size="10px" x="50%" y="100%">Text is not SVG - cannot display</text></a></switch></svg>"}}]); \ No newline at end of file diff --git a/assets/js/09555e24.dba993ac.js b/assets/js/09555e24.dba993ac.js new file mode 100644 index 00000000000..c268ec3ead7 --- /dev/null +++ b/assets/js/09555e24.dba993ac.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkplatformatic_oss_website=self.webpackChunkplatformatic_oss_website||[]).push([[68616],{3905:(e,t,n)=>{n.d(t,{Zo:()=>m,kt:()=>u});var a=n(67294);function i(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function r(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);t&&(a=a.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,a)}return n}function o(e){for(var t=1;t=0||(i[n]=e[n]);return i}(e,t);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);for(a=0;a=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(i[n]=e[n])}return i}var p=a.createContext({}),s=function(e){var t=a.useContext(p),n=t;return e&&(n="function"==typeof e?e(t):o(o({},t),e)),n},m=function(e){var t=s(e.components);return a.createElement(p.Provider,{value:t},e.children)},c="mdxType",d={inlineCode:"code",wrapper:function(e){var t=e.children;return a.createElement(a.Fragment,{},t)}},k=a.forwardRef((function(e,t){var 
n=e.components,i=e.mdxType,r=e.originalType,p=e.parentName,m=l(e,["components","mdxType","originalType","parentName"]),c=s(n),k=i,u=c["".concat(p,".").concat(k)]||c[k]||d[k]||r;return n?a.createElement(u,o(o({ref:t},m),{},{components:n})):a.createElement(u,o({ref:t},m))}));function u(e,t){var n=arguments,i=t&&t.mdxType;if("string"==typeof e||i){var r=n.length,o=new Array(r);o[0]=k;var l={};for(var p in t)hasOwnProperty.call(t,p)&&(l[p]=t[p]);l.originalType=e,l[c]="string"==typeof e?e:i,o[1]=l;for(var s=2;s{n.r(t),n.d(t,{assets:()=>p,contentTitle:()=>o,default:()=>d,frontMatter:()=>r,metadata:()=>l,toc:()=>s});var a=n(87462),i=(n(67294),n(3905));const r={},o="Configuration",l={unversionedId:"reference/composer/configuration",id:"version-0.42.0/reference/composer/configuration",title:"Configuration",description:"Platformatic Composer configured with a configuration file. It supports the use",source:"@site/versioned_docs/version-0.42.0/reference/composer/configuration.md",sourceDirName:"reference/composer",slug:"/reference/composer/configuration",permalink:"/docs/0.42.0/reference/composer/configuration",draft:!1,editUrl:"https://github.com/platformatic/oss/edit/main/versioned_docs/version-0.42.0/reference/composer/configuration.md",tags:[],version:"0.42.0",frontMatter:{},sidebar:"docs",previous:{title:"Platformatic Composer",permalink:"/docs/0.42.0/reference/composer/introduction"},next:{title:"Programmatic API",permalink:"/docs/0.42.0/reference/composer/programmatic"}},p={},s=[{value:"Configuration file",id:"configuration-file",level:2},{value:"Supported 
formats",id:"supported-formats",level:3},{value:"Settings",id:"settings",level:2},{value:"server",id:"server",level:3},{value:"metrics",id:"metrics",level:3},{value:"plugins",id:"plugins",level:3},{value:"watch",id:"watch",level:3},{value:"composer",id:"composer",level:3},{value:"openapi",id:"openapi",level:4},{value:"openapi-configuration",id:"openapi-configuration",level:5},{value:"telemetry",id:"telemetry",level:3},{value:"Environment variable placeholders",id:"environment-variable-placeholders",level:2},{value:"Example",id:"example",level:3},{value:"Setting environment variables",id:"setting-environment-variables",level:3},{value:"Allowed placeholder names",id:"allowed-placeholder-names",level:3}],m={toc:s},c="wrapper";function d(e){let{components:t,...n}=e;return(0,i.kt)(c,(0,a.Z)({},m,n,{components:t,mdxType:"MDXLayout"}),(0,i.kt)("h1",{id:"configuration"},"Configuration"),(0,i.kt)("p",null,"Platformatic Composer configured with a configuration file. It supports the use\nof environment variables as setting values with ",(0,i.kt)("a",{parentName:"p",href:"#configuration-placeholders"},"configuration placeholders"),"."),(0,i.kt)("h2",{id:"configuration-file"},"Configuration file"),(0,i.kt)("p",null,"If the Platformatic CLI finds a file in the current working directory matching\none of these filenames, it will automatically load it:"),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"platformatic.composer.json")),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"platformatic.composer.json5")),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"platformatic.composer.yml")," or ",(0,i.kt)("inlineCode",{parentName:"li"},"platformatic.composer.yaml")),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"platformatic.composer.tml")," or ",(0,i.kt)("inlineCode",{parentName:"li"},"platformatic.composer.toml"))),(0,i.kt)("p",null,"Alternatively, a 
",(0,i.kt)("a",{parentName:"p",href:"/docs/0.42.0/reference/cli#composer"},(0,i.kt)("inlineCode",{parentName:"a"},"--config")," option")," with a configuration\nfilepath can be passed to most ",(0,i.kt)("inlineCode",{parentName:"p"},"platformatic composer")," CLI commands."),(0,i.kt)("p",null,"The configuration examples in this reference use JSON."),(0,i.kt)("h3",{id:"supported-formats"},"Supported formats"),(0,i.kt)("table",null,(0,i.kt)("thead",{parentName:"table"},(0,i.kt)("tr",{parentName:"thead"},(0,i.kt)("th",{parentName:"tr",align:"left"},"Format"),(0,i.kt)("th",{parentName:"tr",align:"left"},"Extensions"))),(0,i.kt)("tbody",{parentName:"table"},(0,i.kt)("tr",{parentName:"tbody"},(0,i.kt)("td",{parentName:"tr",align:"left"},"JSON"),(0,i.kt)("td",{parentName:"tr",align:"left"},(0,i.kt)("inlineCode",{parentName:"td"},".json"))),(0,i.kt)("tr",{parentName:"tbody"},(0,i.kt)("td",{parentName:"tr",align:"left"},"JSON5"),(0,i.kt)("td",{parentName:"tr",align:"left"},(0,i.kt)("inlineCode",{parentName:"td"},".json5"))),(0,i.kt)("tr",{parentName:"tbody"},(0,i.kt)("td",{parentName:"tr",align:"left"},"YAML"),(0,i.kt)("td",{parentName:"tr",align:"left"},(0,i.kt)("inlineCode",{parentName:"td"},".yml"),", ",(0,i.kt)("inlineCode",{parentName:"td"},".yaml"))),(0,i.kt)("tr",{parentName:"tbody"},(0,i.kt)("td",{parentName:"tr",align:"left"},"TOML"),(0,i.kt)("td",{parentName:"tr",align:"left"},(0,i.kt)("inlineCode",{parentName:"td"},".tml"))))),(0,i.kt)("p",null,"Comments are supported by the JSON5, YAML and TOML file formats."),(0,i.kt)("h2",{id:"settings"},"Settings"),(0,i.kt)("p",null,"Configuration settings are organised into the following groups:"),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("a",{parentName:"li",href:"#server"},(0,i.kt)("inlineCode",{parentName:"a"},"server"))," 
",(0,i.kt)("strong",{parentName:"li"},"(required)")),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("a",{parentName:"li",href:"#composer"},(0,i.kt)("inlineCode",{parentName:"a"},"composer"))),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("a",{parentName:"li",href:"#metrics"},(0,i.kt)("inlineCode",{parentName:"a"},"metrics"))),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("a",{parentName:"li",href:"#plugins"},(0,i.kt)("inlineCode",{parentName:"a"},"plugins"))),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("a",{parentName:"li",href:"#telemetry"},(0,i.kt)("inlineCode",{parentName:"a"},"telemetry")))),(0,i.kt)("p",null,"Sensitive configuration settings containing sensitive data should be set using ",(0,i.kt)("a",{parentName:"p",href:"#configuration-placeholders"},"configuration placeholders"),"."),(0,i.kt)("h3",{id:"server"},(0,i.kt)("inlineCode",{parentName:"h3"},"server")),(0,i.kt)("p",null,"A ",(0,i.kt)("strong",{parentName:"p"},"required")," object with the following settings:"),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("p",{parentName:"li"},(0,i.kt)("strong",{parentName:"p"},(0,i.kt)("inlineCode",{parentName:"strong"},"hostname"))," (",(0,i.kt)("strong",{parentName:"p"},"required"),", ",(0,i.kt)("inlineCode",{parentName:"p"},"string"),") \u2014 Hostname where Platformatic Composer server will listen for connections.")),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("p",{parentName:"li"},(0,i.kt)("strong",{parentName:"p"},(0,i.kt)("inlineCode",{parentName:"strong"},"port"))," (",(0,i.kt)("strong",{parentName:"p"},"required"),", ",(0,i.kt)("inlineCode",{parentName:"p"},"number"),") \u2014 Port where Platformatic Composer server will listen for connections.")),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("p",{parentName:"li"},(0,i.kt)("strong",{parentName:"p"},(0,i.kt)("inlineCode",{parentName:"strong"},"healthCheck"))," (",(0,i.kt)("inlineCode",{parentName:"p"},"boolean")," or ",(0,i.kt)("inlineCode",{parentName:"p"},"object"),") \u2014 Enables the health check 
endpoint."),(0,i.kt)("ul",{parentName:"li"},(0,i.kt)("li",{parentName:"ul"},"Powered by ",(0,i.kt)("a",{parentName:"li",href:"https://github.com/fastify/under-pressure"},(0,i.kt)("inlineCode",{parentName:"a"},"@fastify/under-pressure")),"."),(0,i.kt)("li",{parentName:"ul"},"The value can be an object, used to specify the interval between checks in milliseconds (default: ",(0,i.kt)("inlineCode",{parentName:"li"},"5000"),")")),(0,i.kt)("p",{parentName:"li"},(0,i.kt)("em",{parentName:"p"},"Example")),(0,i.kt)("pre",{parentName:"li"},(0,i.kt)("code",{parentName:"pre",className:"language-json"},'{\n "server": {\n ...\n "healthCheck": {\n "interval": 2000\n }\n }\n}\n'))),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("p",{parentName:"li"},(0,i.kt)("strong",{parentName:"p"},(0,i.kt)("inlineCode",{parentName:"strong"},"cors"))," (",(0,i.kt)("inlineCode",{parentName:"p"},"object"),") \u2014 Configuration for Cross-Origin Resource Sharing (CORS) headers."),(0,i.kt)("ul",{parentName:"li"},(0,i.kt)("li",{parentName:"ul"},"All options will be passed to the ",(0,i.kt)("a",{parentName:"li",href:"https://github.com/fastify/fastify-cors"},(0,i.kt)("inlineCode",{parentName:"a"},"@fastify/cors"))," plugin. 
In order to specify a ",(0,i.kt)("inlineCode",{parentName:"li"},"RegExp")," object, you can pass ",(0,i.kt)("inlineCode",{parentName:"li"},"{ regexp: 'yourregexp' }"),",\nit will be automatically converted."))),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("p",{parentName:"li"},(0,i.kt)("strong",{parentName:"p"},(0,i.kt)("inlineCode",{parentName:"strong"},"logger"))," (",(0,i.kt)("inlineCode",{parentName:"p"},"object"),") -- the ",(0,i.kt)("a",{parentName:"p",href:"https://www.fastify.io/docs/latest/Reference/Server/#logger"},"logger configuration"),".")),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("p",{parentName:"li"},(0,i.kt)("strong",{parentName:"p"},(0,i.kt)("inlineCode",{parentName:"strong"},"pluginTimeout"))," (",(0,i.kt)("inlineCode",{parentName:"p"},"integer"),") -- the number of milliseconds to wait for a Fastify plugin to load, see the ",(0,i.kt)("a",{parentName:"p",href:"https://www.fastify.io/docs/latest/Reference/Server/#plugintimeout"},"fastify docs")," for more details.")),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("p",{parentName:"li"},(0,i.kt)("strong",{parentName:"p"},(0,i.kt)("inlineCode",{parentName:"strong"},"https"))," (",(0,i.kt)("inlineCode",{parentName:"p"},"object"),") - Configuration for HTTPS supporting the following options."),(0,i.kt)("ul",{parentName:"li"},(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"key")," (",(0,i.kt)("strong",{parentName:"li"},"required"),", ",(0,i.kt)("inlineCode",{parentName:"li"},"string"),", ",(0,i.kt)("inlineCode",{parentName:"li"},"object"),", or ",(0,i.kt)("inlineCode",{parentName:"li"},"array"),") - If ",(0,i.kt)("inlineCode",{parentName:"li"},"key")," is a string, it specifies the private key to be used. If ",(0,i.kt)("inlineCode",{parentName:"li"},"key")," is an object, it must have a ",(0,i.kt)("inlineCode",{parentName:"li"},"path")," property specifying the private key file. 
Multiple keys are supported by passing an array of keys."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"cert")," (",(0,i.kt)("strong",{parentName:"li"},"required"),", ",(0,i.kt)("inlineCode",{parentName:"li"},"string"),", ",(0,i.kt)("inlineCode",{parentName:"li"},"object"),", or ",(0,i.kt)("inlineCode",{parentName:"li"},"array"),") - If ",(0,i.kt)("inlineCode",{parentName:"li"},"cert")," is a string, it specifies the certificate to be used. If ",(0,i.kt)("inlineCode",{parentName:"li"},"cert")," is an object, it must have a ",(0,i.kt)("inlineCode",{parentName:"li"},"path")," property specifying the certificate file. Multiple certificates are supported by passing an array of keys.")))),(0,i.kt)("h3",{id:"metrics"},(0,i.kt)("inlineCode",{parentName:"h3"},"metrics")),(0,i.kt)("p",null,"Configuration for a ",(0,i.kt)("a",{parentName:"p",href:"https://prometheus.io/"},"Prometheus")," server that will export monitoring metrics\nfor the current server instance. It uses ",(0,i.kt)("a",{parentName:"p",href:"https://github.com/SkeLLLa/fastify-metrics"},(0,i.kt)("inlineCode",{parentName:"a"},"fastify-metrics")),"\nunder the hood."),(0,i.kt)("p",null,"This setting can be a ",(0,i.kt)("inlineCode",{parentName:"p"},"boolean")," or an ",(0,i.kt)("inlineCode",{parentName:"p"},"object"),". 
If set to ",(0,i.kt)("inlineCode",{parentName:"p"},"true")," the Prometheus server will listen on ",(0,i.kt)("inlineCode",{parentName:"p"},"http://0.0.0.0:9090"),"."),(0,i.kt)("p",null,"Supported object properties:"),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("strong",{parentName:"li"},(0,i.kt)("inlineCode",{parentName:"strong"},"hostname"))," (",(0,i.kt)("inlineCode",{parentName:"li"},"string"),") \u2014 The hostname where Prometheus server will listen for connections."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("strong",{parentName:"li"},(0,i.kt)("inlineCode",{parentName:"strong"},"port"))," (",(0,i.kt)("inlineCode",{parentName:"li"},"number"),") \u2014 The port where Prometheus server will listen for connections."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("strong",{parentName:"li"},(0,i.kt)("inlineCode",{parentName:"strong"},"auth"))," (",(0,i.kt)("inlineCode",{parentName:"li"},"object"),") \u2014 Basic Auth configuration. ",(0,i.kt)("strong",{parentName:"li"},(0,i.kt)("inlineCode",{parentName:"strong"},"username"))," and ",(0,i.kt)("strong",{parentName:"li"},(0,i.kt)("inlineCode",{parentName:"strong"},"password"))," are required here\n(use ",(0,i.kt)("a",{parentName:"li",href:"#environment-variables"},"environment variables"),").")),(0,i.kt)("h3",{id:"plugins"},(0,i.kt)("inlineCode",{parentName:"h3"},"plugins")),(0,i.kt)("p",null,"An optional object that defines the plugins loaded by Platformatic Composer."),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("p",{parentName:"li"},(0,i.kt)("strong",{parentName:"p"},(0,i.kt)("inlineCode",{parentName:"strong"},"paths"))," (",(0,i.kt)("strong",{parentName:"p"},"required"),", ",(0,i.kt)("inlineCode",{parentName:"p"},"array"),"): an array of paths (",(0,i.kt)("inlineCode",{parentName:"p"},"string"),")\nor an array of objects composed as follows,"),(0,i.kt)("ul",{parentName:"li"},(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"path")," 
(",(0,i.kt)("inlineCode",{parentName:"li"},"string"),"): Relative path to plugin's entry point."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"options")," (",(0,i.kt)("inlineCode",{parentName:"li"},"object"),"): Optional plugin options."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"encapsulate")," (",(0,i.kt)("inlineCode",{parentName:"li"},"boolean"),"): if the path is a folder, it instruct Platformatic to not encapsulate those plugins."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"maxDepth")," (",(0,i.kt)("inlineCode",{parentName:"li"},"integer"),"): if the path is a folder, it limits the depth to load the content from."))),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("p",{parentName:"li"},(0,i.kt)("strong",{parentName:"p"},(0,i.kt)("inlineCode",{parentName:"strong"},"typescript"))," (",(0,i.kt)("inlineCode",{parentName:"p"},"boolean"),"): enable typescript compilation. A ",(0,i.kt)("inlineCode",{parentName:"p"},"tsconfig.json")," file is required in the same folder."),(0,i.kt)("p",{parentName:"li"},(0,i.kt)("em",{parentName:"p"},"Example")),(0,i.kt)("pre",{parentName:"li"},(0,i.kt)("code",{parentName:"pre",className:"language-json"},'{\n "plugins": {\n "paths": [{\n "path": "./my-plugin.js",\n "options": {\n "foo": "bar"\n }\n }]\n }\n}\n')))),(0,i.kt)("h3",{id:"watch"},(0,i.kt)("inlineCode",{parentName:"h3"},"watch")),(0,i.kt)("p",null,"Disable watching for file changes if set to ",(0,i.kt)("inlineCode",{parentName:"p"},"false"),". It can also be customized with the following options:"),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("p",{parentName:"li"},(0,i.kt)("strong",{parentName:"p"},(0,i.kt)("inlineCode",{parentName:"strong"},"ignore"))," (",(0,i.kt)("inlineCode",{parentName:"p"},"string[]"),", default: ",(0,i.kt)("inlineCode",{parentName:"p"},"null"),"): List of glob patterns to ignore when watching for changes. 
If ",(0,i.kt)("inlineCode",{parentName:"p"},"null")," or not specified, ignore rule is not applied. Ignore option doesn't work for typescript files.")),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("p",{parentName:"li"},(0,i.kt)("strong",{parentName:"p"},(0,i.kt)("inlineCode",{parentName:"strong"},"allow"))," (",(0,i.kt)("inlineCode",{parentName:"p"},"string[]"),", default: ",(0,i.kt)("inlineCode",{parentName:"p"},"['*.js', '**/*.js']"),"): List of glob patterns to allow when watching for changes. If ",(0,i.kt)("inlineCode",{parentName:"p"},"null")," or not specified, allow rule is not applied. Allow option doesn't work for typescript files."),(0,i.kt)("p",{parentName:"li"},(0,i.kt)("em",{parentName:"p"},"Example")),(0,i.kt)("pre",{parentName:"li"},(0,i.kt)("code",{parentName:"pre",className:"language-json"},'{\n "watch": {\n "ignore": ["*.mjs", "**/*.mjs"],\n "allow": ["my-plugin.js", "plugins/*.js"]\n }\n}\n')))),(0,i.kt)("h3",{id:"composer"},(0,i.kt)("inlineCode",{parentName:"h3"},"composer")),(0,i.kt)("p",null,"Configure ",(0,i.kt)("inlineCode",{parentName:"p"},"@platformatic/composer")," specific settings such as ",(0,i.kt)("inlineCode",{parentName:"p"},"services")," or ",(0,i.kt)("inlineCode",{parentName:"p"},"refreshTimeout"),":"),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("p",{parentName:"li"},(0,i.kt)("strong",{parentName:"p"},(0,i.kt)("inlineCode",{parentName:"strong"},"services"))," (",(0,i.kt)("inlineCode",{parentName:"p"},"array"),", default: ",(0,i.kt)("inlineCode",{parentName:"p"},"[]"),") \u2014 is an array of objects that defines\nthe services managed by the composer. 
Each service object supports the following settings:"),(0,i.kt)("ul",{parentName:"li"},(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("strong",{parentName:"li"},(0,i.kt)("inlineCode",{parentName:"strong"},"id"))," (",(0,i.kt)("strong",{parentName:"li"},"required"),", ",(0,i.kt)("inlineCode",{parentName:"li"},"string"),") - A unique identifier for the service."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("strong",{parentName:"li"},(0,i.kt)("inlineCode",{parentName:"strong"},"origin"))," (",(0,i.kt)("inlineCode",{parentName:"li"},"string"),") - A service origin. Skip this option if the service is executing inside of Platformatic Runtime. In this case, service id will be used instead of origin."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("strong",{parentName:"li"},(0,i.kt)("inlineCode",{parentName:"strong"},"openapi"))," (",(0,i.kt)("strong",{parentName:"li"},"required"),", ",(0,i.kt)("inlineCode",{parentName:"li"},"object"),") - The configuration file used to compose OpenAPI specification. See the ",(0,i.kt)("a",{parentName:"li",href:"#openapi"},"openapi")," for details."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("strong",{parentName:"li"},(0,i.kt)("inlineCode",{parentName:"strong"},"proxy"))," (",(0,i.kt)("inlineCode",{parentName:"li"},"object")," or ",(0,i.kt)("inlineCode",{parentName:"li"},"false"),") - Service proxy configuration. If ",(0,i.kt)("inlineCode",{parentName:"li"},"false"),", the service proxy is disabled.",(0,i.kt)("ul",{parentName:"li"},(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"prefix")," (",(0,i.kt)("strong",{parentName:"li"},"required"),", ",(0,i.kt)("inlineCode",{parentName:"li"},"string"),") - Service proxy prefix. 
All service routes will be prefixed with this value."))),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("strong",{parentName:"li"},(0,i.kt)("inlineCode",{parentName:"strong"},"refreshTimeout"))," (",(0,i.kt)("inlineCode",{parentName:"li"},"number"),") - The number of milliseconds to wait for check for changes in the service OpenAPI specification. If not specified, the default value is ",(0,i.kt)("inlineCode",{parentName:"li"},"1000"),".")))),(0,i.kt)("h4",{id:"openapi"},(0,i.kt)("inlineCode",{parentName:"h4"},"openapi")),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("strong",{parentName:"li"},(0,i.kt)("inlineCode",{parentName:"strong"},"url"))," (",(0,i.kt)("inlineCode",{parentName:"li"},"string"),") - A path of the route that exposes the OpenAPI specification. If a service is a Platformatic Service or Platformatic DB, use ",(0,i.kt)("inlineCode",{parentName:"li"},"/documentation/json")," as a value. Use this or ",(0,i.kt)("inlineCode",{parentName:"li"},"file")," option to specify the OpenAPI specification."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("strong",{parentName:"li"},(0,i.kt)("inlineCode",{parentName:"strong"},"file"))," (",(0,i.kt)("inlineCode",{parentName:"li"},"string"),") - A path to the OpenAPI specification file. Use this or ",(0,i.kt)("inlineCode",{parentName:"li"},"url")," option to specify the OpenAPI specification."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("strong",{parentName:"li"},(0,i.kt)("inlineCode",{parentName:"strong"},"prefix"))," (",(0,i.kt)("inlineCode",{parentName:"li"},"string"),") - A prefix for the OpenAPI specification. All service routes will be prefixed with this value."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("strong",{parentName:"li"},(0,i.kt)("inlineCode",{parentName:"strong"},"config"))," (",(0,i.kt)("inlineCode",{parentName:"li"},"string"),") - A path to the OpenAPI configuration file. This file is used to customize the OpenAPI specification. 
See the ",(0,i.kt)("a",{parentName:"li",href:"#openapi-configuration"},"openapi-configuration")," for details.")),(0,i.kt)("h5",{id:"openapi-configuration"},(0,i.kt)("inlineCode",{parentName:"h5"},"openapi-configuration")),(0,i.kt)("p",null,"The OpenAPI configuration file is a JSON file that is used to customize the OpenAPI specification. It supports the following options:"),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("p",{parentName:"li"},(0,i.kt)("strong",{parentName:"p"},(0,i.kt)("inlineCode",{parentName:"strong"},"ignore"))," (",(0,i.kt)("inlineCode",{parentName:"p"},"boolean"),") - If ",(0,i.kt)("inlineCode",{parentName:"p"},"true"),", the route will be ignored by the composer.\nIf you want to ignore a specific method, use the ",(0,i.kt)("inlineCode",{parentName:"p"},"ignore")," option in the nested method object."),(0,i.kt)("p",{parentName:"li"}," ",(0,i.kt)("em",{parentName:"p"},"Example")),(0,i.kt)("pre",{parentName:"li"},(0,i.kt)("code",{parentName:"pre",className:"language-json"},'{\n "paths": {\n "/users": {\n "ignore": true\n },\n "/users/{id}": {\n "get": { "ignore": true },\n "put": { "ignore": true }\n }\n }\n}\n'))),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("p",{parentName:"li"},(0,i.kt)("strong",{parentName:"p"},"alias")," (",(0,i.kt)("inlineCode",{parentName:"p"},"string"),") - Use it create an alias for the route path. Original route path will be ignored."),(0,i.kt)("p",{parentName:"li"},(0,i.kt)("em",{parentName:"p"},"Example")),(0,i.kt)("pre",{parentName:"li"},(0,i.kt)("code",{parentName:"pre",className:"language-json"},'{\n "paths": {\n "/users": {\n "alias": "/customers"\n }\n }\n}\n'))),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("p",{parentName:"li"},(0,i.kt)("strong",{parentName:"p"},(0,i.kt)("inlineCode",{parentName:"strong"},"rename"))," (",(0,i.kt)("inlineCode",{parentName:"p"},"string"),") - Use it to rename composed route response fields.\nUse json schema format to describe the response structure. 
For now it works only for ",(0,i.kt)("inlineCode",{parentName:"p"},"200")," response."),(0,i.kt)("p",{parentName:"li"}," ",(0,i.kt)("em",{parentName:"p"},"Example")),(0,i.kt)("pre",{parentName:"li"},(0,i.kt)("code",{parentName:"pre",className:"language-json"},'{\n "paths": {\n "/users": {\n "responses": {\n "200": {\n "type": "array",\n "items": {\n "type": "object",\n "properties": {\n "id": { "rename": "user_id" },\n "name": { "rename": "first_name" }\n }\n }\n }\n }\n }\n }\n}\n')))),(0,i.kt)("p",null,(0,i.kt)("em",{parentName:"p"},"Examples")),(0,i.kt)("p",null," Composition of two remote services:"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-json"},'{\n "composer": {\n "services": [\n {\n "id": "auth-service",\n "origin": "https://auth-service.com",\n "openapi": {\n "url": "/documentation/json",\n "prefix": "auth"\n }\n },\n {\n "id": "payment-service",\n "origin": "https://payment-service.com",\n "openapi": {\n "file": "./schemas/payment-service.json"\n }\n }\n ],\n "refreshTimeout": 1000\n }\n}\n')),(0,i.kt)("p",null," Composition of two local services inside of Platformatic Runtime:"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-json"},'{\n "composer": {\n "services": [\n {\n "id": "auth-service",\n "openapi": {\n "url": "/documentation/json",\n "prefix": "auth"\n }\n },\n {\n "id": "payment-service",\n "openapi": {\n "file": "./schemas/payment-service.json"\n }\n }\n ],\n "refreshTimeout": 1000\n }\n}\n')),(0,i.kt)("h3",{id:"telemetry"},(0,i.kt)("inlineCode",{parentName:"h3"},"telemetry")),(0,i.kt)("p",null,(0,i.kt)("a",{parentName:"p",href:"https://opentelemetry.io/"},"Open Telemetry")," is optionally supported with these settings:"),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("strong",{parentName:"li"},(0,i.kt)("inlineCode",{parentName:"strong"},"serviceName"))," (",(0,i.kt)("strong",{parentName:"li"},"required"),", ",(0,i.kt)("inlineCode",{parentName:"li"},"string"),") \u2014 
Name of the service as will be reported in open telemetry."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("strong",{parentName:"li"},(0,i.kt)("inlineCode",{parentName:"strong"},"version"))," (",(0,i.kt)("inlineCode",{parentName:"li"},"string"),") \u2014 Optional version (free form)"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("strong",{parentName:"li"},(0,i.kt)("inlineCode",{parentName:"strong"},"skip"))," (",(0,i.kt)("inlineCode",{parentName:"li"},"array"),"). Optional list of operations to skip when exporting telemetry defined ",(0,i.kt)("inlineCode",{parentName:"li"},"object")," with properties: ",(0,i.kt)("ul",{parentName:"li"},(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"method"),": GET, POST, PUT, DELETE, PATCH, HEAD, OPTIONS, TRACE"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"path"),". e.g.: ",(0,i.kt)("inlineCode",{parentName:"li"},"/documentation/json")," "))),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("strong",{parentName:"li"},(0,i.kt)("inlineCode",{parentName:"strong"},"exporter"))," (",(0,i.kt)("inlineCode",{parentName:"li"},"object")," or ",(0,i.kt)("inlineCode",{parentName:"li"},"array"),") \u2014 Exporter configuration. If not defined, the exporter defaults to ",(0,i.kt)("inlineCode",{parentName:"li"},"console"),". If an array of objects is configured, every object must be a valid exporter object. The exporter object has the following properties:",(0,i.kt)("ul",{parentName:"li"},(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("strong",{parentName:"li"},(0,i.kt)("inlineCode",{parentName:"strong"},"type"))," (",(0,i.kt)("inlineCode",{parentName:"li"},"string"),") \u2014 Exporter type. Supported values are ",(0,i.kt)("inlineCode",{parentName:"li"},"console"),", ",(0,i.kt)("inlineCode",{parentName:"li"},"otlp"),", ",(0,i.kt)("inlineCode",{parentName:"li"},"zipkin")," and ",(0,i.kt)("inlineCode",{parentName:"li"},"memory")," (default: ",(0,i.kt)("inlineCode",{parentName:"li"},"console"),"). 
",(0,i.kt)("inlineCode",{parentName:"li"},"memory")," is only supported for testing purposes. "),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("strong",{parentName:"li"},(0,i.kt)("inlineCode",{parentName:"strong"},"options"))," (",(0,i.kt)("inlineCode",{parentName:"li"},"object"),") \u2014 These options are supported:",(0,i.kt)("ul",{parentName:"li"},(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("strong",{parentName:"li"},(0,i.kt)("inlineCode",{parentName:"strong"},"url"))," (",(0,i.kt)("inlineCode",{parentName:"li"},"string"),") \u2014 The URL to send the telemetry to. Required for ",(0,i.kt)("inlineCode",{parentName:"li"},"otlp")," exporter. This has no effect on ",(0,i.kt)("inlineCode",{parentName:"li"},"console")," and ",(0,i.kt)("inlineCode",{parentName:"li"},"memory")," exporters."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("strong",{parentName:"li"},(0,i.kt)("inlineCode",{parentName:"strong"},"headers"))," (",(0,i.kt)("inlineCode",{parentName:"li"},"object"),") \u2014 Optional headers to send with the telemetry. This has no effect on ",(0,i.kt)("inlineCode",{parentName:"li"},"console")," and ",(0,i.kt)("inlineCode",{parentName:"li"},"memory")," exporters.")))))),(0,i.kt)("p",null,"Note that OTLP traces can be consumed by different solutions, like ",(0,i.kt)("a",{parentName:"p",href:"https://www.jaegertracing.io/"},"Jaeger"),". 
",(0,i.kt)("a",{parentName:"p",href:"https://opentelemetry.io/ecosystem/vendors/"},"Here")," the full list."),(0,i.kt)("p",null," ",(0,i.kt)("em",{parentName:"p"},"Example")),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-json"},'{\n "telemetry": {\n "serviceName": "test-service",\n "exporter": {\n "type": "otlp",\n "options": {\n "url": "http://localhost:4318/v1/traces"\n }\n }\n }\n}\n')),(0,i.kt)("h2",{id:"environment-variable-placeholders"},"Environment variable placeholders"),(0,i.kt)("p",null,"The value for any configuration setting can be replaced with an environment variable\nby adding a placeholder in the configuration file, for example ",(0,i.kt)("inlineCode",{parentName:"p"},"{PLT_SERVER_LOGGER_LEVEL}"),"."),(0,i.kt)("p",null,"All placeholders in a configuration must be available as an environment variable\nand must meet the ",(0,i.kt)("a",{parentName:"p",href:"#allowed-placeholder-names"},"allowed placeholder name")," rules."),(0,i.kt)("h3",{id:"example"},"Example"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-json",metastring:'title="platformatic.service.json"',title:'"platformatic.service.json"'},'{\n "server": {\n "port": "{PORT}"\n }\n}\n')),(0,i.kt)("p",null,"Platformatic will replace the placeholders in this example with the environment\nvariables of the same name."),(0,i.kt)("h3",{id:"setting-environment-variables"},"Setting environment variables"),(0,i.kt)("p",null,"If a ",(0,i.kt)("inlineCode",{parentName:"p"},".env")," file exists it will automatically be loaded by Platformatic using\n",(0,i.kt)("a",{parentName:"p",href:"https://github.com/motdotla/dotenv"},(0,i.kt)("inlineCode",{parentName:"a"},"dotenv")),". 
For example:"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-plaintext",metastring:'title=".env"',title:'".env"'},"PLT_SERVER_LOGGER_LEVEL=info\nPORT=8080\n")),(0,i.kt)("p",null,"The ",(0,i.kt)("inlineCode",{parentName:"p"},".env")," file must be located in the same folder as the Platformatic configuration\nfile or in the current working directory."),(0,i.kt)("p",null,"Environment variables can also be set directly on the commmand line, for example:"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-bash"},"PLT_SERVER_LOGGER_LEVEL=debug npx platformatic composer\n")),(0,i.kt)("h3",{id:"allowed-placeholder-names"},"Allowed placeholder names"),(0,i.kt)("p",null,"Only placeholder names prefixed with ",(0,i.kt)("inlineCode",{parentName:"p"},"PLT_"),", or that are in this allow list, will be\ndynamically replaced in the configuration file:"),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"PORT"))),(0,i.kt)("p",null,"This restriction is to avoid accidentally exposing system environment variables.\nAn error will be raised by Platformatic if it finds a configuration placeholder\nthat isn't allowed."),(0,i.kt)("p",null,"The default allow list can be extended by passing a ",(0,i.kt)("inlineCode",{parentName:"p"},"--allow-env")," CLI option with a\ncomma separated list of strings, for example:"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-bash"},"npx platformatic composer --allow-env=HOST,SERVER_LOGGER_LEVEL\n")),(0,i.kt)("p",null,"If ",(0,i.kt)("inlineCode",{parentName:"p"},"--allow-env")," is passed as an option to the CLI, it will be merged with the\ndefault allow list."))}d.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/09705aa7.58bc86ae.js b/assets/js/09705aa7.58bc86ae.js new file mode 100644 index 00000000000..980e33b61c9 --- /dev/null +++ b/assets/js/09705aa7.58bc86ae.js @@ -0,0 +1 @@ +"use 
strict";(self.webpackChunkplatformatic_oss_website=self.webpackChunkplatformatic_oss_website||[]).push([[19730],{61677:(e,t,a)=>{a.r(t),a.d(t,{assets:()=>s,contentTitle:()=>m,default:()=>f,frontMatter:()=>p,metadata:()=>c,toc:()=>d});var n=a(87462),l=(a(67294),a(3905)),i=a(74866),o=a(85162),r=a(3901);const p={toc_max_heading_level:4},m="Platformatic CLI",c={unversionedId:"reference/cli",id:"version-0.41.2/reference/cli",title:"Platformatic CLI",description:"Installation and usage",source:"@site/versioned_docs/version-0.41.2/reference/cli.md",sourceDirName:"reference",slug:"/reference/cli",permalink:"/docs/0.41.2/reference/cli",draft:!1,editUrl:"https://github.com/platformatic/oss/edit/main/versioned_docs/version-0.41.2/reference/cli.md",tags:[],version:"0.41.2",frontMatter:{toc_max_heading_level:4},sidebar:"docs",previous:{title:"Reference",permalink:"/docs/0.41.2/category/reference"},next:{title:"Platformatic Composer",permalink:"/docs/0.41.2/reference/composer/introduction"}},s={},d=[{value:"Installation and usage",id:"installation-and-usage",level:2},{value:"Commands",id:"commands",level:2},{value:"help",id:"help",level:3},{value:"compile",id:"compile",level:4},{value:"deploy",id:"deploy",level:4},{value:"gh",id:"gh",level:4},{value:"start",id:"start",level:4},{value:"upgrade",id:"upgrade",level:4},{value:"client",id:"client",level:3},{value:"help",id:"help-1",level:4},{value:"composer",id:"composer",level:3},{value:"help",id:"help-2",level:4},{value:"openapi schemas fetch",id:"openapi-schemas-fetch",level:4},{value:"start",id:"start-1",level:4},{value:"db",id:"db",level:3},{value:"compile",id:"compile-1",level:4},{value:"help",id:"help-3",level:4},{value:"migrations apply",id:"migrations-apply",level:4},{value:"migrations 
create",id:"migrations-create",level:4},{value:"migrations",id:"migrations",level:4},{value:"schema",id:"schema",level:4},{value:"seed",id:"seed",level:4},{value:"start",id:"start-2",level:4},{value:"types",id:"types",level:4},{value:"service",id:"service",level:3},{value:"compile",id:"compile-2",level:4},{value:"help",id:"help-4",level:4},{value:"schema",id:"schema-1",level:4},{value:"start",id:"start-3",level:4},{value:"frontend",id:"frontend",level:3},{value:"runtime",id:"runtime",level:3},{value:"compile",id:"compile-3",level:4},{value:"help",id:"help-5",level:4},{value:"start",id:"start-4",level:4},{value:"start",id:"start-5",level:3}],u={toc:d},k="wrapper";function f(e){let{components:t,...a}=e;return(0,l.kt)(k,(0,n.Z)({},u,a,{components:t,mdxType:"MDXLayout"}),(0,l.kt)("h1",{id:"platformatic-cli"},"Platformatic CLI"),(0,l.kt)("h2",{id:"installation-and-usage"},"Installation and usage"),(0,l.kt)("p",null,"Install the Platformatic CLI as a dependency for your project:"),(0,l.kt)(i.Z,{groupId:"package-manager",mdxType:"Tabs"},(0,l.kt)(o.Z,{value:"npm",label:"npm",mdxType:"TabItem"},(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"npm install platformatic\n"))),(0,l.kt)(o.Z,{value:"yarn",label:"Yarn",mdxType:"TabItem"},(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"yarn add platformatic\n"))),(0,l.kt)(o.Z,{value:"pnpm",label:"pnpm",mdxType:"TabItem"},(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"pnpm add platformatic\n")))),(0,l.kt)("p",null,"Once it's installed you can run it with:"),(0,l.kt)(i.Z,{groupId:"package-manager",mdxType:"Tabs"},(0,l.kt)(o.Z,{value:"npm",label:"npm",mdxType:"TabItem"},(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"npx platformatic\n"))),(0,l.kt)(o.Z,{value:"yarn",label:"Yarn",mdxType:"TabItem"},(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"yarn 
platformatic\n"))),(0,l.kt)(o.Z,{value:"pnpm",label:"pnpm",mdxType:"TabItem"},(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"pnpm platformatic\n")))),(0,l.kt)("admonition",{type:"info"},(0,l.kt)("p",{parentName:"admonition"},"The ",(0,l.kt)("inlineCode",{parentName:"p"},"platformatic")," package can be installed globally, but installing it as a\nproject dependency ensures that everyone working on the project is using the\nsame version of the Platformatic CLI.")),(0,l.kt)("h2",{id:"commands"},"Commands"),(0,l.kt)("p",null,"The Platformatic CLI provides the following commands:"),(0,l.kt)(r.Z,{toc:d,minHeadingLevel:3,maxHeadingLevel:4,mdxType:"TOCInline"}),(0,l.kt)("h3",{id:"help"},"help"),(0,l.kt)("p",null,"Welcome to Platformatic. Available commands are:"),(0,l.kt)("ul",null,(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"help")," - display this message."),(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"help ")," - show more information about a command."),(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"db")," - start Platformatic DB; type ",(0,l.kt)("inlineCode",{parentName:"li"},"platformatic db help")," to know more."),(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"service")," - start Platformatic Service; type ",(0,l.kt)("inlineCode",{parentName:"li"},"platformatic service help")," to know more."),(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"upgrade")," - upgrade the Platformatic configuration to the latest version."),(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"gh")," - create a new gh action for Platformatic deployments."),(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"deploy")," - deploy a Platformatic application to the cloud."),(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"runtime")," - start Platformatic Runtime; 
type ",(0,l.kt)("inlineCode",{parentName:"li"},"platformatic runtime help")," to know more."),(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"start")," - start a Platformatic application."),(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"frontend"),"- create frontend code to consume the REST APIs.")),(0,l.kt)("h4",{id:"compile"},"compile"),(0,l.kt)("p",null,"Compile all typescript plugins."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"}," $ platformatic compile\n")),(0,l.kt)("p",null,"This command will compile the TypeScript plugins for each platformatic application."),(0,l.kt)("h4",{id:"deploy"},"deploy"),(0,l.kt)("p",null,"Deploys an application to the ",(0,l.kt)("a",{parentName:"p",href:"https://docs.platformatic.dev/docs/category/platformatic-cloud"},"Platformatic Cloud"),"."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"}," $ platformatic deploy\n")),(0,l.kt)("p",null,"Options:"),(0,l.kt)("ul",null,(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"-t, --type static/dynamic")," - The type of the workspace."),(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"-c, --config FILE")," - Specify a configuration file to use."),(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"-k, --keys FILE")," - Specify a path to the workspace keys file."),(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"-l --label TEXT")," - The deploy label. Only for dynamic workspaces."),(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"-e --env FILE"),' - The environment file to use. Default: ".env"'),(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"-s --secrets FILE"),' - The secrets file to use. 
Default: ".secrets.env"'),(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"--workspace-id uuid")," - The workspace id where the application will be deployed."),(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"--workspace-key TEXT")," - The workspace key where the application will be deployed.")),(0,l.kt)("ol",null,(0,l.kt)("li",{parentName:"ol"},"To deploy a Platformatic application to the cloud, you should go to the Platformatic cloud dashboard and create a workspace."),(0,l.kt)("li",{parentName:"ol"},"Once you have created a workspace, retrieve your workspace id and key from the workspace settings page. Optionally, you can download the provided workspace env file, which you can use with the ",(0,l.kt)("inlineCode",{parentName:"li"},"--keys")," option.")),(0,l.kt)("blockquote",null,(0,l.kt)("p",{parentName:"blockquote"},"\u2139\ufe0f"),(0,l.kt)("p",{parentName:"blockquote"},"When deploying an application to a ",(0,l.kt)("strong",{parentName:"p"},(0,l.kt)("em",{parentName:"strong"},"dynamic workspace")),", specify the deploy ",(0,l.kt)("inlineCode",{parentName:"p"},"--label")," option. You can find it on your cloud dashboard or you can specify a new one.")),(0,l.kt)("h4",{id:"gh"},"gh"),(0,l.kt)("p",null,"Creates a gh action to deploy platformatic services on workspaces."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"}," $ platformatic gh -t dynamic\n")),(0,l.kt)("p",null,"Options:"),(0,l.kt)("ul",null,(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"-w --workspace ID")," - The workspace ID where the service will be deployed."),(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"-t, --type static/dynamic")," - The type of the workspace. 
Defaults to static."),(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"-c, --config FILE")," - Specify a configuration file to use."),(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"-b, --build")," - Build the service before deploying (",(0,l.kt)("inlineCode",{parentName:"li"},"npm run build"),").")),(0,l.kt)("p",null,"If not specified, the configuration will be loaded from any of the following, in the current directory."),(0,l.kt)("ul",null,(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"platformatic.db.json"),", or"),(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"platformatic.db.yml"),", or "),(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"platformatic.db.tml"),", or "),(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"platformatic.service.json"),", or"),(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"platformatic.service.yml"),", or "),(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"platformatic.service.tml"))),(0,l.kt)("p",null,"You can find more details about the configuration format here:"),(0,l.kt)("ul",null,(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("a",{parentName:"li",href:"https://docs.platformatic.dev/docs/reference/db/configuration"},"Platformatic DB Configuration")),(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("a",{parentName:"li",href:"https://docs.platformatic.dev/docs/reference/service/configuration"},"Platformatic Service Configuration"))),(0,l.kt)("h4",{id:"start"},"start"),(0,l.kt)("p",null,"Start a Platformatic application with the following command:"),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"$ platformatic start\n")),(0,l.kt)("p",null,"Options:"),(0,l.kt)("ul",null,(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"-c, --config ")," - Path to the configuration 
file."),(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"--inspect[=[host:]port]")," - Start the Node.js debugger. ",(0,l.kt)("inlineCode",{parentName:"li"},"host")," defaults to ",(0,l.kt)("inlineCode",{parentName:"li"},"'127.0.0.1'"),". ",(0,l.kt)("inlineCode",{parentName:"li"},"port")," defaults to 9229. Use caution when binding to a public host:port combination."),(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"--inspect-brk[=[host:]port]")," - Start the Node.js debugger and block until a client has attached. ",(0,l.kt)("inlineCode",{parentName:"li"},"host")," defaults to ",(0,l.kt)("inlineCode",{parentName:"li"},"'127.0.0.1'"),". ",(0,l.kt)("inlineCode",{parentName:"li"},"port")," defaults to 9229. Use caution when binding to a public host:port combination.")),(0,l.kt)("h4",{id:"upgrade"},"upgrade"),(0,l.kt)("p",null,"Upgrade the Platformatic schema configuration to the latest version."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"}," $ platformatic upgrade\n")),(0,l.kt)("p",null,"Options:"),(0,l.kt)("ul",null,(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"-c, --config FILE")," - Specify a schema configuration file to use.")),(0,l.kt)("p",null,"If not specified, the configuration will be loaded from any of the following, in the current directory."),(0,l.kt)("ul",null,(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"platformatic.db.json"),", or"),(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"platformatic.db.yml"),", or "),(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"platformatic.db.tml"),", or "),(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"platformatic.service.json"),", or"),(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"platformatic.service.yml"),", or 
"),(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"platformatic.service.tml"))),(0,l.kt)("p",null,"You can find more details about the configuration format here:"),(0,l.kt)("ul",null,(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("a",{parentName:"li",href:"https://docs.platformatic.dev/docs/reference/db/configuration"},"Platformatic DB Configuration")),(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("a",{parentName:"li",href:"https://docs.platformatic.dev/docs/reference/service/configuration"},"Platformatic Service Configuration"))),(0,l.kt)("h3",{id:"client"},"client"),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"platformatic client \n")),(0,l.kt)("h4",{id:"help-1"},"help"),(0,l.kt)("p",null,"Create a Fastify plugin that exposes a client for a remote OpenAPI or GraphQL API."),(0,l.kt)("p",null,"To create a client for a remote OpenAPI API, you can use the following command:"),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"$ platformatic client http://example.com/to/schema/file -n myclient\n")),(0,l.kt)("p",null,"To create a client for a remote Graphql API, you can use the following command:"),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"$ platformatic client http://example.com/graphql -n myclient\n")),(0,l.kt)("p",null,"Instead of a URL, you can also use a local file:"),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"$ platformatic client path/to/schema -n myclient\n")),(0,l.kt)("p",null,"This will create a Fastify plugin that exposes a client for the remote API in a folder ",(0,l.kt)("inlineCode",{parentName:"p"},"myclient"),"\nand a file named myclient.js inside it."),(0,l.kt)("p",null,"If platformatic config file is specified, it will be edited and a ",(0,l.kt)("inlineCode",{parentName:"p"},"clients")," section will be added.\nThen, in any part of your Platformatic application you can use the 
client."),(0,l.kt)("p",null,"You can use the client in your application in Javascript, calling a GraphQL endpoint:"),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-js"},"module.exports = async function (app, opts) {\n app.post('/', async (request, reply) => {\n const res = await app.myclient.graphql({\n query: 'query { hello }'\n })\n return res\n })\n}\n")),(0,l.kt)("p",null,"or in Typescript, calling an OpenAPI endpoint:"),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-ts"},"import { FastifyInstance } from 'fastify'\n/// \n\nexport default async function (app: FastifyInstance) {\n app.get('/', async () => {\n return app.myclient.get({})\n })\n}\n")),(0,l.kt)("p",null,"Options:"),(0,l.kt)("ul",null,(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"-c, --config ")," - Path to the configuration file."),(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"-n, --name ")," - Name of the client."),(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"-f, --folder ")," - Name of the plugin folder, defaults to --name value."),(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"-t, --typescript")," - Generate the client plugin in TypeScript."),(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"--full-response")," - Client will return full response object rather than just the body."),(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"--full-request")," - Client will be called with all parameters wrapped in ",(0,l.kt)("inlineCode",{parentName:"li"},"body"),", ",(0,l.kt)("inlineCode",{parentName:"li"},"headers")," and ",(0,l.kt)("inlineCode",{parentName:"li"},"query")," properties."),(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"--full")," - Enables both ",(0,l.kt)("inlineCode",{parentName:"li"},"--full-request")," and 
",(0,l.kt)("inlineCode",{parentName:"li"},"--full-response")," overriding them."),(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"--optional-headers ")," - Comma separated string of headers that will be marked as optional in the type file")),(0,l.kt)("h3",{id:"composer"},"composer"),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"platformatic composer \n")),(0,l.kt)("h4",{id:"help-2"},"help"),(0,l.kt)("p",null,"Available commands:"),(0,l.kt)("ul",null,(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"help")," - show this help message."),(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"help ")," - shows more information about a command."),(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"start")," - start the server."),(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"openapi schemas fetch")," - fetch OpenAPI schemas from services.")),(0,l.kt)("h4",{id:"openapi-schemas-fetch"},"openapi schemas fetch"),(0,l.kt)("p",null,"Fetch OpenAPI schemas from remote services to use in your Platformatic project."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"}," $ platformatic composer openapi schemas fetch\n")),(0,l.kt)("p",null,"It will fetch all the schemas from the remote services and store them by path\nset in the ",(0,l.kt)("inlineCode",{parentName:"p"},"platformatic.composer.json")," file. If the path is not set, it will\nskip fetching the schema."),(0,l.kt)("h4",{id:"start-1"},"start"),(0,l.kt)("p",null,"Start the Platformatic Composer server with the following command:"),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"}," $ platformatic composer start\n")),(0,l.kt)("p",null,"You will need a configuration file. 
Here is an example to get you started,\nsave the following as ",(0,l.kt)("inlineCode",{parentName:"p"},"platformatic.composer.json"),":"),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-json"},' {\n "server": {\n "hostname": "127.0.0.1",\n "port": 0,\n "logger": {\n "level": "info"\n }\n },\n "composer": {\n "services": [\n {\n "id": "service1",\n "origin": "http://127.0.0.1:3051",\n "openapi": {\n "url": "/documentation/json"\n }\n },\n {\n "id": "service2",\n "origin": "http://127.0.0.1:3052",\n "openapi": {\n "file": "./schemas/service2.openapi.json"\n }\n }\n ],\n "refreshTimeout": 1000\n }\n }\n')),(0,l.kt)("p",null,"By sending the SIGUSR2 signal, the server can be reloaded."),(0,l.kt)("p",null,"Options:"),(0,l.kt)("ul",null,(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"-c, --config FILE")," - Specify a configuration file to use.")),(0,l.kt)("p",null,"If not specified, the configuration will be loaded from any of the following, in the current directory."),(0,l.kt)("ul",null,(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"platformatic.composer.json"),", or"),(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"platformatic.composer.yml"),", or "),(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"platformatic.composer.tml"))),(0,l.kt)("p",null,"You can find more details about the configuration format here:"),(0,l.kt)("ul",null,(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("a",{parentName:"li",href:"https://docs.platformatic.dev/docs/reference/composer/configuration"},"Platformatic Composer Configuration"))),(0,l.kt)("h3",{id:"db"},"db"),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"platformatic db \n")),(0,l.kt)("h4",{id:"compile-1"},"compile"),(0,l.kt)("p",null,"Compile typescript plugins."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"}," $ platformatic db 
compile\n")),(0,l.kt)("p",null,"As a result of executing this command, the Platformatic DB will compile typescript\nplugins in the ",(0,l.kt)("inlineCode",{parentName:"p"},"outDir")," directory. "),(0,l.kt)("p",null,"If not specified, the configuration will be loaded from any of the following, in the current directory."),(0,l.kt)("ul",null,(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"platformatic.db.json"),", or"),(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"platformatic.db.yml"),", or "),(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"platformatic.db.tml"))),(0,l.kt)("p",null,"You can find more details about the configuration format here:"),(0,l.kt)("ul",null,(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("a",{parentName:"li",href:"https://docs.platformatic.dev/docs/reference/db/configuration"},"Platformatic DB Configuration"))),(0,l.kt)("h4",{id:"help-3"},"help"),(0,l.kt)("p",null,"Available commands:"),(0,l.kt)("ul",null,(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"help")," - show this help message."),(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"help ")," - shows more information about a command."),(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"start")," - start the server."),(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"compile")," - compile typescript plugins."),(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"seed")," - run a seed file."),(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"types")," - generate typescript types for entities."),(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"schema")," - generate and print api schema."),(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"migrations create")," - generate do and undo migration 
files."),(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"migrations apply")," - apply migration files.")),(0,l.kt)("h4",{id:"migrations-apply"},"migrations apply"),(0,l.kt)("p",null,"Apply all configured migrations to the database:"),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"}," $ platformatic db migrations apply\n")),(0,l.kt)("p",null,"The migrations will be applied in the order they are specified in the\nfolder defined in the configuration file. If you want to apply a specific migration,\nyou can use the ",(0,l.kt)("inlineCode",{parentName:"p"},"--to")," option:"),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"}," $ platformatic db migrations apply --to 001\n")),(0,l.kt)("p",null,"Here is an example migration:"),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-sql"}," CREATE TABLE graphs (\n id SERIAL PRIMARY KEY,\n name TEXT\n );\n")),(0,l.kt)("p",null,"You can always rollback to a specific migration with:"),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"}," $ platformatic db migrations apply --to VERSION\n")),(0,l.kt)("p",null,"Use 000 to reset to the initial state."),(0,l.kt)("p",null,"Options:"),(0,l.kt)("ul",null,(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"-c, --config ")," - Path to the configuration file."),(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"-t, --to ")," - Migrate to a specific version.")),(0,l.kt)("p",null,"If not specified, the configuration will be loaded from any of the following, in the current directory."),(0,l.kt)("ul",null,(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"platformatic.db.json"),", or"),(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"platformatic.db.yml"),", or 
"),(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"platformatic.db.tml"))),(0,l.kt)("p",null,"You can find more details about the configuration format here:"),(0,l.kt)("ul",null,(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("a",{parentName:"li",href:"https://docs.platformatic.dev/docs/reference/db/configuration"},"Platformatic DB Configuration"))),(0,l.kt)("h4",{id:"migrations-create"},"migrations create"),(0,l.kt)("p",null,"Create next migration files."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"}," $ platformatic db migrations create\n")),(0,l.kt)("p",null,"It will generate do and undo sql files in the migrations folder. The name of the\nfiles will be the next migration number."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},' $ platformatic db migrations create --name "create_users_table"\n')),(0,l.kt)("p",null,"Options:"),(0,l.kt)("ul",null,(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"-c, --config ")," - Path to the configuration file.")),(0,l.kt)("p",null,"If not specified, the configuration will be loaded from any of the following, in the current directory."),(0,l.kt)("ul",null,(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"platformatic.db.json"),", or"),(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"platformatic.db.yml"),", or "),(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"platformatic.db.tml"))),(0,l.kt)("p",null,"You can find more details about the configuration format here:"),(0,l.kt)("ul",null,(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("a",{parentName:"li",href:"https://docs.platformatic.dev/docs/reference/db/configuration"},"Platformatic DB Configuration"))),(0,l.kt)("h4",{id:"migrations"},"migrations"),(0,l.kt)("p",null,"Available commands:"),(0,l.kt)("ul",null,(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"migrations create")," - 
generate do and undo migration files."),(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"migrations apply")," - apply migration files.")),(0,l.kt)("h4",{id:"schema"},"schema"),(0,l.kt)("p",null,"Update the config schema file:"),(0,l.kt)("ul",null,(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"schema config")," - update the JSON schema config available on ",(0,l.kt)("inlineCode",{parentName:"li"},"platformatic.db.schema.json"))),(0,l.kt)("p",null,"Your configuration on ",(0,l.kt)("inlineCode",{parentName:"p"},"platformatic.db.json")," has a schema defined to improve the developer experience and avoid mistakes when updating the configuration of Platformatic DB.\nWhen you run ",(0,l.kt)("inlineCode",{parentName:"p"},"platformatic db init"),", a new JSON ",(0,l.kt)("inlineCode",{parentName:"p"},"$schema")," property is added in ",(0,l.kt)("inlineCode",{parentName:"p"},"platformatic.db.schema.json"),". This can allow your IDE to add suggestions (f.e. 
mandatory/missing fields, types, default values) by opening the config in ",(0,l.kt)("inlineCode",{parentName:"p"},"platformatic.db.json"),".\nRunning ",(0,l.kt)("inlineCode",{parentName:"p"},"platformatic db schema config")," you can update your schema so that it matches well the latest changes available on your config."),(0,l.kt)("p",null,"Generate a schema from the database and prints it to standard output:"),(0,l.kt)("ul",null,(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"schema graphql")," - generate the GraphQL schema"),(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"schema openapi")," - generate the OpenAPI schema")),(0,l.kt)("p",null,"Options:"),(0,l.kt)("ul",null,(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"-c, --config FILE")," - Specify a configuration file to use.")),(0,l.kt)("p",null,"If not specified, the configuration will be loaded from any of the following, in the current directory."),(0,l.kt)("ul",null,(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"platformatic.db.json"),", or"),(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"platformatic.db.yml"),", or "),(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"platformatic.db.tml"))),(0,l.kt)("p",null,"You can find more details about the configuration format here:"),(0,l.kt)("ul",null,(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("a",{parentName:"li",href:"https://docs.platformatic.dev/docs/reference/db/configuration"},"Platformatic DB Configuration"))),(0,l.kt)("h4",{id:"seed"},"seed"),(0,l.kt)("p",null,"Load a seed into the database. 
This is a convenience method that loads\na JavaScript file and configure @platformatic/sql-mapper to connect to\nthe database specified in the configuration file."),(0,l.kt)("p",null,"Here is an example of a seed file:"),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-javascript"}," 'use strict'\n\n module.exports = async function ({ entities, db, sql }) {\n await entities.graph.save({ input: { name: 'Hello' } })\n await db.query(sql`\n INSERT INTO graphs (name) VALUES ('Hello 2');\n `)\n }\n")),(0,l.kt)("p",null,"You can run this using the ",(0,l.kt)("inlineCode",{parentName:"p"},"seed")," command:"),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"}," $ platformatic db seed seed.js\n")),(0,l.kt)("p",null,"Options:"),(0,l.kt)("ul",null,(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"--config")," - Path to the configuration file.")),(0,l.kt)("p",null,"If not specified, the configuration will be loaded from any of the following, in the current directory."),(0,l.kt)("ul",null,(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"platformatic.db.json"),", or"),(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"platformatic.db.yml"),", or "),(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"platformatic.db.tml"))),(0,l.kt)("p",null,"You can find more details about the configuration format here:"),(0,l.kt)("ul",null,(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("a",{parentName:"li",href:"https://docs.platformatic.dev/docs/reference/db/configuration"},"Platformatic DB Configuration"))),(0,l.kt)("h4",{id:"start-2"},"start"),(0,l.kt)("p",null,"Start the Platformatic DB server with the following command:"),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"}," $ platformatic db start\n")),(0,l.kt)("p",null,"You will need a configuration file. 
Here is an example to get you started,\nsave the following as ",(0,l.kt)("inlineCode",{parentName:"p"},"platformatic.db.json"),":"),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-json"},' {\n "server": {\n "hostname": "127.0.0.1",\n "port": 0,\n "logger": {\n "level": "info"\n }\n },\n "db": {\n "connectionString": "sqlite://./db"\n },\n "migrations": {\n "dir": "./migrations"\n }\n }\n')),(0,l.kt)("p",null,"Remember to create a migration, run the ",(0,l.kt)("inlineCode",{parentName:"p"},"db help migrate")," command to know more."),(0,l.kt)("p",null,"All outstanding migrations will be applied to the database unless the\n",(0,l.kt)("inlineCode",{parentName:"p"},"migrations.autoApply")," configuration option is set to false."),(0,l.kt)("p",null,"By sending the SIGUSR2 signal, the server can be reloaded."),(0,l.kt)("p",null,"Options:"),(0,l.kt)("ul",null,(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"-c, --config FILE")," - Specify a configuration file to use.")),(0,l.kt)("p",null,"If not specified, the configuration will be loaded from any of the following, in the current directory."),(0,l.kt)("ul",null,(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"platformatic.db.json"),", or"),(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"platformatic.db.yml"),", or "),(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"platformatic.db.tml"))),(0,l.kt)("p",null,"You can find more details about the configuration format here:"),(0,l.kt)("ul",null,(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("a",{parentName:"li",href:"https://docs.platformatic.dev/docs/reference/db/configuration"},"Platformatic DB Configuration"))),(0,l.kt)("h4",{id:"types"},"types"),(0,l.kt)("p",null,"Generate typescript types for your entities from the database."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"}," $ platformatic db 
types\n")),(0,l.kt)("p",null,"As a result of executing this command, the Platformatic DB will generate a ",(0,l.kt)("inlineCode",{parentName:"p"},"types"),"\nfolder with a typescript file for each database entity. It will also generate a\n",(0,l.kt)("inlineCode",{parentName:"p"},"global.d.ts")," file that injects the types into the Application instance."),(0,l.kt)("p",null,"In order to add type support to your plugins, you need to install some additional\ndependencies. To do this, copy and run an ",(0,l.kt)("inlineCode",{parentName:"p"},"npm install"),' command with dependencies\nthat "platformatic db types" will ask you.'),(0,l.kt)("p",null,"Here is an example of a platformatic plugin.js with jsdoc support.\nYou can use it to add autocomplete to your code."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-javascript"},"/// \n'use strict'\n\n/** @param {import('fastify').FastifyInstance} app */\nmodule.exports = async function (app) {\n app.get('/movie', async () => {\n const movies = await app.platformatic.entities.movie.find({\n where: { title: { eq: 'The Hitchhiker\\'s Guide to the Galaxy' } }\n })\n return movies[0].id\n })\n}\n")),(0,l.kt)("p",null,"If not specified, the configuration will be loaded from any of the following, in the current directory."),(0,l.kt)("ul",null,(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"platformatic.db.json"),", or"),(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"platformatic.db.yml"),", or "),(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"platformatic.db.tml"))),(0,l.kt)("p",null,"You can find more details about the configuration format here:"),(0,l.kt)("ul",null,(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("a",{parentName:"li",href:"https://docs.platformatic.dev/docs/reference/db/configuration"},"Platformatic DB 
Configuration"))),(0,l.kt)("h3",{id:"service"},"service"),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"platformatic service \n")),(0,l.kt)("h4",{id:"compile-2"},"compile"),(0,l.kt)("p",null,"Compile typescript plugins."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"}," $ platformatic service compile\n")),(0,l.kt)("p",null,"As a result of executing this command, Platformatic Service will compile typescript\nplugins in the ",(0,l.kt)("inlineCode",{parentName:"p"},"outDir")," directory. "),(0,l.kt)("p",null,"If not specified, the configuration will be loaded from any of the following, in the current directory."),(0,l.kt)("ul",null,(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"platformatic.service.json"),", or"),(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"platformatic.service.yml"),", or "),(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"platformatic.service.tml"))),(0,l.kt)("p",null,"You can find more details about the configuration format here:"),(0,l.kt)("ul",null,(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("a",{parentName:"li",href:"https://docs.platformatic.dev/docs/reference/service/configuration"},"Platformatic Service Configuration"))),(0,l.kt)("h4",{id:"help-4"},"help"),(0,l.kt)("p",null,"Available commands:"),(0,l.kt)("ul",null,(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"help")," - show this help message."),(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"help ")," - shows more information about a command."),(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"start")," - start the server."),(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"schema config")," - generate the schema configuration file.")),(0,l.kt)("h4",{id:"schema-1"},"schema"),(0,l.kt)("p",null,"Update the config schema 
file:"),(0,l.kt)("ul",null,(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"schema config")," - update the JSON schema config available on ",(0,l.kt)("inlineCode",{parentName:"li"},"platformatic.service.schema.json"))),(0,l.kt)("p",null,"Your configuration on ",(0,l.kt)("inlineCode",{parentName:"p"},"platformatic.service.json")," has a schema defined to improve the developer experience and avoid mistakes when updating the configuration of Platformatic Service.\nWhen you initialize a new Platformatic service (f.e. running ",(0,l.kt)("inlineCode",{parentName:"p"},"npm create platformatic@latest"),"), a new JSON ",(0,l.kt)("inlineCode",{parentName:"p"},"$schema")," property is added in the ",(0,l.kt)("inlineCode",{parentName:"p"},"platformatic.service.json")," config. This can allow your IDE to add suggestions (f.e. mandatory/missing fields, types, default values) by opening the config in ",(0,l.kt)("inlineCode",{parentName:"p"},"platformatic.service.json"),".\nRunning ",(0,l.kt)("inlineCode",{parentName:"p"},"platformatic service schema config")," you can update your schema so that it matches well the latest changes available on your config."),(0,l.kt)("h4",{id:"start-3"},"start"),(0,l.kt)("p",null,"Start the Platformatic Service with the following command:"),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"}," $ platformatic service start\n")),(0,l.kt)("p",null,"You will need a configuration file. 
Here is an example to get you started,\nsave the following as ",(0,l.kt)("inlineCode",{parentName:"p"},"platformatic.service.json"),":"),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-json"},'{\n "server": {\n "hostname": "127.0.0.1",\n "port": 0,\n "logger": {\n "level": "info"\n }\n },\n "plugin": {\n "path": "./plugin.js"\n }\n}\n')),(0,l.kt)("h3",{id:"frontend"},"frontend"),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"platformatic frontend \n")),(0,l.kt)("p",null,"Create frontend code to consume the REST APIs of a Platformatic application."),(0,l.kt)("p",null,"From the directory you want the frontend code to be generated (typically ",(0,l.kt)("inlineCode",{parentName:"p"},"/src/"),") run -"),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"npx platformatic frontend http://127.0.0.1:3042 ts\n")),(0,l.kt)("blockquote",null,(0,l.kt)("p",{parentName:"blockquote"},"\u2139\ufe0f"),(0,l.kt)("p",{parentName:"blockquote"},"Where ",(0,l.kt)("inlineCode",{parentName:"p"},"http://127.0.0.1:3042")," must be replaced with your Platformatic application endpoint, and the language can either be ",(0,l.kt)("inlineCode",{parentName:"p"},"ts")," or ",(0,l.kt)("inlineCode",{parentName:"p"},"js"),". 
When the command is run, the Platformatic CLI generates -"),(0,l.kt)("ul",{parentName:"blockquote"},(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"api.d.ts")," - A TypeScript module that includes all the OpenAPI-related types."),(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"api.ts")," or ",(0,l.kt)("inlineCode",{parentName:"li"},"api.js")," - A module that includes a function for every single REST endpoint."))),(0,l.kt)("p",null,"If you use the ",(0,l.kt)("inlineCode",{parentName:"p"},"--name")," option it will create custom file names."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"npx platformatic frontend http://127.0.0.1:3042 ts --name foobar\n")),(0,l.kt)("p",null,"Will create ",(0,l.kt)("inlineCode",{parentName:"p"},"foobar.ts")," and ",(0,l.kt)("inlineCode",{parentName:"p"},"foobar-types.d.ts")),(0,l.kt)("p",null,"Refer to the ",(0,l.kt)("a",{parentName:"p",href:"https://docs.platformatic.dev/docs/guides/generate-frontend-code-to-consume-platformatic-rest-api"},"dedicated guide")," where the full process of generating and consuming the frontend code is described."),(0,l.kt)("p",null,"In case of problems, please check that:"),(0,l.kt)("ul",null,(0,l.kt)("li",{parentName:"ul"},"The Platformatic app URL is valid."),(0,l.kt)("li",{parentName:"ul"},"The Platformatic app whose URL belongs must be up and running."),(0,l.kt)("li",{parentName:"ul"},"OpenAPI must be enabled (",(0,l.kt)("inlineCode",{parentName:"li"},"db.openapi")," in your ",(0,l.kt)("inlineCode",{parentName:"li"},"platformatic.db.json")," is not set to ",(0,l.kt)("inlineCode",{parentName:"li"},"false"),"). 
You can find more details about the db configuration format ",(0,l.kt)("a",{parentName:"li",href:"https://docs.platformatic.dev/docs/reference/db/configuration/#db"},"here"),"."),(0,l.kt)("li",{parentName:"ul"},"CORS must be managed in your Platformatic app (",(0,l.kt)("inlineCode",{parentName:"li"},"server.cors.origin.regexp")," in your ",(0,l.kt)("inlineCode",{parentName:"li"},"platformatic.db.json")," is set to ",(0,l.kt)("inlineCode",{parentName:"li"},"/*/"),", for instance). You can find more details about the cors configuration ",(0,l.kt)("a",{parentName:"li",href:"https://docs.platformatic.dev/docs/reference/service/configuration/#server"},"here"),".")),(0,l.kt)("h3",{id:"runtime"},"runtime"),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"platformatic runtime \n")),(0,l.kt)("h4",{id:"compile-3"},"compile"),(0,l.kt)("p",null,"Compile all typescript plugins for all services."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"}," $ platformatic runtime compile\n")),(0,l.kt)("p",null,"This command will compile the TypeScript\nplugins for each services registered in the runtime."),(0,l.kt)("h4",{id:"help-5"},"help"),(0,l.kt)("p",null,"Available commands:"),(0,l.kt)("ul",null,(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"help")," - show this help message."),(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"help ")," - shows more information about a command."),(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"start")," - start the application.")),(0,l.kt)("h4",{id:"start-4"},"start"),(0,l.kt)("p",null,"Start the Platformatic Runtime with the following command:"),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"}," $ platformatic runtime start\n")),(0,l.kt)("h3",{id:"start-5"},"start"),(0,l.kt)("p",null,"Start a Platformatic application with the following 
command:"),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"$ platformatic start\n")),(0,l.kt)("p",null,"Options:"),(0,l.kt)("ul",null,(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"-c, --config ")," - Path to the configuration file."),(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"--inspect[=[host:]port]")," - Start the Node.js debugger. ",(0,l.kt)("inlineCode",{parentName:"li"},"host")," defaults to ",(0,l.kt)("inlineCode",{parentName:"li"},"'127.0.0.1'"),". ",(0,l.kt)("inlineCode",{parentName:"li"},"port")," defaults to 9229. Use caution when binding to a public host:port combination."),(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("inlineCode",{parentName:"li"},"--inspect-brk[=[host:]port]")," - Start the Node.js debugger and block until a client has attached. ",(0,l.kt)("inlineCode",{parentName:"li"},"host")," defaults to ",(0,l.kt)("inlineCode",{parentName:"li"},"'127.0.0.1'"),". ",(0,l.kt)("inlineCode",{parentName:"li"},"port")," defaults to 9229. 
Use caution when binding to a public host:port combination.")))}f.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/0a3210c9.8ab58837.js b/assets/js/0a3210c9.8ab58837.js new file mode 100644 index 00000000000..45c12e0b619 --- /dev/null +++ b/assets/js/0a3210c9.8ab58837.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkplatformatic_oss_website=self.webpackChunkplatformatic_oss_website||[]).push([[96101,72],{3905:(e,t,a)=>{a.d(t,{Zo:()=>p,kt:()=>f});var n=a(67294);function r(e,t,a){return t in e?Object.defineProperty(e,t,{value:a,enumerable:!0,configurable:!0,writable:!0}):e[t]=a,e}function o(e,t){var a=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);t&&(n=n.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),a.push.apply(a,n)}return a}function i(e){for(var t=1;t=0||(r[a]=e[a]);return r}(e,t);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);for(n=0;n=0||Object.prototype.propertyIsEnumerable.call(e,a)&&(r[a]=e[a])}return r}var s=n.createContext({}),c=function(e){var t=n.useContext(s),a=t;return e&&(a="function"==typeof e?e(t):i(i({},t),e)),a},p=function(e){var t=c(e.components);return n.createElement(s.Provider,{value:t},e.children)},u="mdxType",m={inlineCode:"code",wrapper:function(e){var t=e.children;return n.createElement(n.Fragment,{},t)}},d=n.forwardRef((function(e,t){var a=e.components,r=e.mdxType,o=e.originalType,s=e.parentName,p=l(e,["components","mdxType","originalType","parentName"]),u=c(a),d=r,f=u["".concat(s,".").concat(d)]||u[d]||m[d]||o;return a?n.createElement(f,i(i({ref:t},p),{},{components:a})):n.createElement(f,i({ref:t},p))}));function f(e,t){var a=arguments,r=t&&t.mdxType;if("string"==typeof e||r){var o=a.length,i=new Array(o);i[0]=d;var l={};for(var s in t)hasOwnProperty.call(t,s)&&(l[s]=t[s]);l.originalType=e,l[u]="string"==typeof e?e:r,i[1]=l;for(var c=2;c{a.d(t,{Z:()=>i});var n=a(67294),r=a(86010);const 
o={tabItem:"tabItem_Ymn6"};function i(e){let{children:t,hidden:a,className:i}=e;return n.createElement("div",{role:"tabpanel",className:(0,r.Z)(o.tabItem,i),hidden:a},t)}},74866:(e,t,a)=>{a.d(t,{Z:()=>w});var n=a(87462),r=a(67294),o=a(86010),i=a(12466),l=a(16550),s=a(91980),c=a(67392),p=a(50012);function u(e){return function(e){return r.Children.map(e,(e=>{if(!e||(0,r.isValidElement)(e)&&function(e){const{props:t}=e;return!!t&&"object"==typeof t&&"value"in t}(e))return e;throw new Error(`Docusaurus error: Bad child <${"string"==typeof e.type?e.type:e.type.name}>: all children of the component should be , and every should have a unique "value" prop.`)}))?.filter(Boolean)??[]}(e).map((e=>{let{props:{value:t,label:a,attributes:n,default:r}}=e;return{value:t,label:a,attributes:n,default:r}}))}function m(e){const{values:t,children:a}=e;return(0,r.useMemo)((()=>{const e=t??u(a);return function(e){const t=(0,c.l)(e,((e,t)=>e.value===t.value));if(t.length>0)throw new Error(`Docusaurus error: Duplicate values "${t.map((e=>e.value)).join(", ")}" found in . Every value needs to be unique.`)}(e),e}),[t,a])}function d(e){let{value:t,tabValues:a}=e;return a.some((e=>e.value===t))}function f(e){let{queryString:t=!1,groupId:a}=e;const n=(0,l.k6)(),o=function(e){let{queryString:t=!1,groupId:a}=e;if("string"==typeof t)return t;if(!1===t)return null;if(!0===t&&!a)throw new Error('Docusaurus error: The component groupId prop is required if queryString=true, because this value is used as the search param name. 
You can also provide an explicit value such as queryString="my-search-param".');return a??null}({queryString:t,groupId:a});return[(0,s._X)(o),(0,r.useCallback)((e=>{if(!o)return;const t=new URLSearchParams(n.location.search);t.set(o,e),n.replace({...n.location,search:t.toString()})}),[o,n])]}function h(e){const{defaultValue:t,queryString:a=!1,groupId:n}=e,o=m(e),[i,l]=(0,r.useState)((()=>function(e){let{defaultValue:t,tabValues:a}=e;if(0===a.length)throw new Error("Docusaurus error: the component requires at least one children component");if(t){if(!d({value:t,tabValues:a}))throw new Error(`Docusaurus error: The has a defaultValue "${t}" but none of its children has the corresponding value. Available values are: ${a.map((e=>e.value)).join(", ")}. If you intend to show no default tab, use defaultValue={null} instead.`);return t}const n=a.find((e=>e.default))??a[0];if(!n)throw new Error("Unexpected error: 0 tabValues");return n.value}({defaultValue:t,tabValues:o}))),[s,c]=f({queryString:a,groupId:n}),[u,h]=function(e){let{groupId:t}=e;const a=function(e){return e?`docusaurus.tab.${e}`:null}(t),[n,o]=(0,p.Nk)(a);return[n,(0,r.useCallback)((e=>{a&&o.set(e)}),[a,o])]}({groupId:n}),g=(()=>{const e=s??u;return d({value:e,tabValues:o})?e:null})();(0,r.useLayoutEffect)((()=>{g&&l(g)}),[g]);return{selectedValue:i,selectValue:(0,r.useCallback)((e=>{if(!d({value:e,tabValues:o}))throw new Error(`Can't select invalid tab value=${e}`);l(e),c(e),h(e)}),[c,h,o]),tabValues:o}}var g=a(72389);const k={tabList:"tabList__CuJ",tabItem:"tabItem_LNqP"};function y(e){let{className:t,block:a,selectedValue:l,selectValue:s,tabValues:c}=e;const p=[],{blockElementScrollPositionUntilNextRender:u}=(0,i.o5)(),m=e=>{const t=e.currentTarget,a=p.indexOf(t),n=c[a].value;n!==l&&(u(t),s(n))},d=e=>{let t=null;switch(e.key){case"Enter":m(e);break;case"ArrowRight":{const a=p.indexOf(e.currentTarget)+1;t=p[a]??p[0];break}case"ArrowLeft":{const 
a=p.indexOf(e.currentTarget)-1;t=p[a]??p[p.length-1];break}}t?.focus()};return r.createElement("ul",{role:"tablist","aria-orientation":"horizontal",className:(0,o.Z)("tabs",{"tabs--block":a},t)},c.map((e=>{let{value:t,label:a,attributes:i}=e;return r.createElement("li",(0,n.Z)({role:"tab",tabIndex:l===t?0:-1,"aria-selected":l===t,key:t,ref:e=>p.push(e),onKeyDown:d,onClick:m},i,{className:(0,o.Z)("tabs__item",k.tabItem,i?.className,{"tabs__item--active":l===t})}),a??t)})))}function b(e){let{lazy:t,children:a,selectedValue:n}=e;const o=(Array.isArray(a)?a:[a]).filter(Boolean);if(t){const e=o.find((e=>e.props.value===n));return e?(0,r.cloneElement)(e,{className:"margin-top--md"}):null}return r.createElement("div",{className:"margin-top--md"},o.map(((e,t)=>(0,r.cloneElement)(e,{key:t,hidden:e.props.value!==n}))))}function v(e){const t=h(e);return r.createElement("div",{className:(0,o.Z)("tabs-container",k.tabList)},r.createElement(y,(0,n.Z)({},e,t)),r.createElement(b,(0,n.Z)({},e,t)))}function w(e){const t=(0,g.Z)();return r.createElement(v,(0,n.Z)({key:String(t)},e))}},76141:(e,t,a)=>{a.r(t),a.d(t,{assets:()=>p,contentTitle:()=>s,default:()=>f,frontMatter:()=>l,metadata:()=>c,toc:()=>u});var n=a(87462),r=(a(67294),a(3905)),o=a(74866),i=a(85162);const l={},s=void 0,c={unversionedId:"getting-started/new-api-project-instructions",id:"version-0.42.0/getting-started/new-api-project-instructions",title:"new-api-project-instructions",description:"Run this command in your terminal to start the Platformatic creator 
wizard:",source:"@site/versioned_docs/version-0.42.0/getting-started/new-api-project-instructions.md",sourceDirName:"getting-started",slug:"/getting-started/new-api-project-instructions",permalink:"/docs/0.42.0/getting-started/new-api-project-instructions",draft:!1,editUrl:"https://github.com/platformatic/oss/edit/main/versioned_docs/version-0.42.0/getting-started/new-api-project-instructions.md",tags:[],version:"0.42.0",frontMatter:{}},p={},u=[],m={toc:u},d="wrapper";function f(e){let{components:t,...a}=e;return(0,r.kt)(d,(0,n.Z)({},m,a,{components:t,mdxType:"MDXLayout"}),(0,r.kt)("p",null,"Run this command in your terminal to start the Platformatic creator wizard:"),(0,r.kt)(o.Z,{groupId:"package-manager-create",mdxType:"Tabs"},(0,r.kt)(i.Z,{value:"npm",label:"npm",mdxType:"TabItem"},(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"npm create platformatic@latest\n"))),(0,r.kt)(i.Z,{value:"yarn",label:"yarn",mdxType:"TabItem"},(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"yarn create platformatic\n"))),(0,r.kt)(i.Z,{value:"pnpm",label:"pnpm",mdxType:"TabItem"},(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"pnpm create platformatic@latest\n")))),(0,r.kt)("p",null,"This interactive command-line tool will ask you some questions about how you'd\nlike to set up your new Platformatic project. For this guide, select these options:"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre"},"- Which kind of project do you want to create? => DB\n- Where would you like to create your project? => quick-start\n- Do you want to create default migrations? => Yes\n- Do you want to create a plugin? => Yes\n- Do you want to use TypeScript? => No\n- Do you want to install dependencies? => Yes (this can take a while)\n- Do you want to apply the migrations? => Yes\n- Do you want to generate types? 
=> Yes\n- Do you want to create the github action to deploy this application to Platformatic Cloud dynamic workspace? => No\n- Do you want to create the github action to deploy this application to Platformatic Cloud static workspace? => No\n")),(0,r.kt)("p",null,"Once the wizard is complete, you'll have a Platformatic app project in the\nfolder ",(0,r.kt)("inlineCode",{parentName:"p"},"quick-start"),", with example migration files and a plugin script."),(0,r.kt)("admonition",{type:"info"},(0,r.kt)("p",{parentName:"admonition"},"Make sure you run the npm/yarn/pnpm command ",(0,r.kt)("inlineCode",{parentName:"p"},"install")," command manually if you\ndon't ask the wizard to do it for you.")))}f.isMDXComponent=!0},77631:(e,t,a)=>{a.r(t),a.d(t,{assets:()=>c,contentTitle:()=>l,default:()=>d,frontMatter:()=>i,metadata:()=>s,toc:()=>p});var n=a(87462),r=(a(67294),a(3905)),o=a(76141);const i={},l="Dockerize a Platformatic App",s={unversionedId:"guides/dockerize-platformatic-app",id:"version-0.42.0/guides/dockerize-platformatic-app",title:"Dockerize a Platformatic App",description:"This guide explains how to create a new Platformatic DB app, which connects to a PostgreSQL database.",source:"@site/versioned_docs/version-0.42.0/guides/dockerize-platformatic-app.md",sourceDirName:"guides",slug:"/guides/dockerize-platformatic-app",permalink:"/docs/0.42.0/guides/dockerize-platformatic-app",draft:!1,editUrl:"https://github.com/platformatic/oss/edit/main/versioned_docs/version-0.42.0/guides/dockerize-platformatic-app.md",tags:[],version:"0.42.0",frontMatter:{},sidebar:"docs",previous:{title:"Telemetry with Jaeger",permalink:"/docs/0.42.0/guides/telemetry"},next:{title:"Reference",permalink:"/docs/0.42.0/category/reference"}},c={},p=[{value:"Generate a Platformatic DB App",id:"generate-a-platformatic-db-app",level:2},{value:"Create Docker image for the Platformatic DB App",id:"create-docker-image-for-the-platformatic-db-app",level:2},{value:"Create Docker Compose config 
file",id:"create-docker-compose-config-file",level:2}],u={toc:p},m="wrapper";function d(e){let{components:t,...a}=e;return(0,r.kt)(m,(0,n.Z)({},u,a,{components:t,mdxType:"MDXLayout"}),(0,r.kt)("h1",{id:"dockerize-a-platformatic-app"},"Dockerize a Platformatic App"),(0,r.kt)("p",null,"This guide explains how to create a new Platformatic DB app, which connects to a PostgreSQL database."),(0,r.kt)("p",null,"We will then create a ",(0,r.kt)("inlineCode",{parentName:"p"},"docker-compose.yml")," file that will run both services in separate containers "),(0,r.kt)("h2",{id:"generate-a-platformatic-db-app"},"Generate a Platformatic DB App"),(0,r.kt)(o.default,{mdxType:"NewApiProjectInstructions"}),(0,r.kt)("h2",{id:"create-docker-image-for-the-platformatic-db-app"},"Create Docker image for the Platformatic DB App"),(0,r.kt)("p",null,"In this step you are going to create some files into the root project directory"),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("inlineCode",{parentName:"li"},".dockerignore")," - This file tells Docker to ignore some files when copying the directory into the image filesystem")),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre"},"node_modules\n.env*\n")),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("inlineCode",{parentName:"li"},"start.sh")," - This is our entrypoint. We will run migrations then start platformatic")),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-sh"},'#!/bin/sh\n\necho "Running migrations..." && \\\nnpx platformatic db migrations apply && \\\necho "Starting Platformatic App..." 
&& \\\nnpm start\n')),(0,r.kt)("admonition",{type:"info"},(0,r.kt)("p",{parentName:"admonition"},"Make sure you make this file executable with the command ",(0,r.kt)("inlineCode",{parentName:"p"},"chmod +x start.sh"))),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("inlineCode",{parentName:"li"},"Dockerfile")," - This is the file Docker uses to create the image")),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre"},'FROM node:18-alpine\nWORKDIR /usr/src/app\nCOPY . .\nRUN npm install\nCOPY . .\nEXPOSE 3042\nCMD [ "./start.sh" ]\n')),(0,r.kt)("p",null,"At this point you can build your Docker image with the command"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"$ docker build -t platformatic-app .\n")),(0,r.kt)("h2",{id:"create-docker-compose-config-file"},"Create Docker Compose config file"),(0,r.kt)("p",null,(0,r.kt)("inlineCode",{parentName:"p"},"docker-compose.yml")," is the configuration file for ",(0,r.kt)("inlineCode",{parentName:"p"},"docker-compose")," which will spin up containers for both PostgresSQL and our Platformatic App"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-yml"},'version: "3.3"\nservices:\n postgresql:\n ports:\n - "5433:5432"\n image: "postgres:15-alpine"\n environment:\n - POSTGRES_PASSWORD=postgres\n platformatic:\n ports:\n - "3042:3042"\n image: \'platformatic-app:latest\'\n depends_on:\n - postgresql\n links:\n - postgresql\n environment:\n PLT_SERVER_HOSTNAME: ${PLT_SERVER_HOSTNAME}\n PORT: ${PORT}\n PLT_SERVER_LOGGER_LEVEL: ${PLT_SERVER_LOGGER_LEVEL}\n DATABASE_URL: postgres://postgres:postgres@postgresql:5432/postgres\n')),(0,r.kt)("p",null,"A couple of things to notice:"),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},"The Platformatic app is started only once the database container is up and running (",(0,r.kt)("inlineCode",{parentName:"li"},"depends_on"),"). 
"),(0,r.kt)("li",{parentName:"ul"},"The Platformatic app is linked with ",(0,r.kt)("inlineCode",{parentName:"li"},"postgresql")," service. Meaning that inside its container ",(0,r.kt)("inlineCode",{parentName:"li"},"ping postgresql")," will be resolved with the internal ip of the database container."),(0,r.kt)("li",{parentName:"ul"},"The environment is taken directly from the ",(0,r.kt)("inlineCode",{parentName:"li"},".env")," file created by the wizard")),(0,r.kt)("p",null,"You can now run your containers with"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"$ docker-compose up # (-d if you want to send them in the background)\n")),(0,r.kt)("p",null,"Everything should start smoothly, and you can access your app pointing your browser to ",(0,r.kt)("inlineCode",{parentName:"p"},"http://0.0.0.0:3042")),(0,r.kt)("p",null,"To stop the app you can either press ",(0,r.kt)("inlineCode",{parentName:"p"},"CTRL-C")," if you are running them in the foreground, or, if you used the ",(0,r.kt)("inlineCode",{parentName:"p"},"-d")," flag, run"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"$ docker-compose down\n")))}d.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/0af809bc.90184971.js b/assets/js/0af809bc.90184971.js new file mode 100644 index 00000000000..06d047ffd35 --- /dev/null +++ b/assets/js/0af809bc.90184971.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkplatformatic_oss_website=self.webpackChunkplatformatic_oss_website||[]).push([[25007],{3905:(e,t,r)=>{r.d(t,{Zo:()=>u,kt:()=>f});var n=r(67294);function a(e,t,r){return t in e?Object.defineProperty(e,t,{value:r,enumerable:!0,configurable:!0,writable:!0}):e[t]=r,e}function o(e,t){var r=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);t&&(n=n.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),r.push.apply(r,n)}return r}function i(e){for(var 
t=1;t=0||(a[r]=e[r]);return a}(e,t);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);for(n=0;n=0||Object.prototype.propertyIsEnumerable.call(e,r)&&(a[r]=e[r])}return a}var s=n.createContext({}),c=function(e){var t=n.useContext(s),r=t;return e&&(r="function"==typeof e?e(t):i(i({},t),e)),r},u=function(e){var t=c(e.components);return n.createElement(s.Provider,{value:t},e.children)},d="mdxType",p={inlineCode:"code",wrapper:function(e){var t=e.children;return n.createElement(n.Fragment,{},t)}},m=n.forwardRef((function(e,t){var r=e.components,a=e.mdxType,o=e.originalType,s=e.parentName,u=l(e,["components","mdxType","originalType","parentName"]),d=c(r),m=a,f=d["".concat(s,".").concat(m)]||d[m]||p[m]||o;return r?n.createElement(f,i(i({ref:t},u),{},{components:r})):n.createElement(f,i({ref:t},u))}));function f(e,t){var r=arguments,a=t&&t.mdxType;if("string"==typeof e||a){var o=r.length,i=new Array(o);i[0]=m;var l={};for(var s in t)hasOwnProperty.call(t,s)&&(l[s]=t[s]);l.originalType=e,l[d]="string"==typeof e?e:a,i[1]=l;for(var c=2;c{r.r(t),r.d(t,{assets:()=>s,contentTitle:()=>i,default:()=>p,frontMatter:()=>o,metadata:()=>l,toc:()=>c});var n=r(87462),a=(r(67294),r(3905));const o={},i="Debug Platformatic DB",l={unversionedId:"guides/debug-platformatic-db",id:"version-0.41.1/guides/debug-platformatic-db",title:"Debug Platformatic DB",description:"Error: No tables found in the database",source:"@site/versioned_docs/version-0.41.1/guides/debug-platformatic-db.md",sourceDirName:"guides",slug:"/guides/debug-platformatic-db",permalink:"/docs/0.41.1/guides/debug-platformatic-db",draft:!1,editUrl:"https://github.com/platformatic/oss/edit/main/versioned_docs/version-0.41.1/guides/debug-platformatic-db.md",tags:[],version:"0.41.1",frontMatter:{},sidebar:"docs",previous:{title:"Monitoring with Prometheus and Grafana",permalink:"/docs/0.41.1/guides/monitoring"},next:{title:"Integrate Prisma with Platformatic 
DB",permalink:"/docs/0.41.1/guides/prisma"}},s={},c=[{value:"Error: No tables found in the database",id:"error-no-tables-found-in-the-database",level:2},{value:"Logging SQL queries",id:"logging-sql-queries",level:2}],u={toc:c},d="wrapper";function p(e){let{components:t,...r}=e;return(0,a.kt)(d,(0,n.Z)({},u,r,{components:t,mdxType:"MDXLayout"}),(0,a.kt)("h1",{id:"debug-platformatic-db"},"Debug Platformatic DB"),(0,a.kt)("h2",{id:"error-no-tables-found-in-the-database"},"Error: No tables found in the database"),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},"Verify your database connection string is correct in your Platformatic DB configuration",(0,a.kt)("ul",{parentName:"li"},(0,a.kt)("li",{parentName:"ul"},"Make sure the database name is correct"))),(0,a.kt)("li",{parentName:"ul"},"Ensure that you have run the migration command ",(0,a.kt)("inlineCode",{parentName:"li"},"npx platformatic db migrations apply")," before starting the server. See the Platformatic DB ",(0,a.kt)("a",{parentName:"li",href:"https://docs.platformatic.dev/docs/reference/db/migrations"},"Migrations")," documentation for more information on working with migrations.")),(0,a.kt)("h2",{id:"logging-sql-queries"},"Logging SQL queries"),(0,a.kt)("p",null,"You can see all the queries that are being run against your database in your terminal by setting the logger level to trace in your ",(0,a.kt)("inlineCode",{parentName:"p"},"platformatic.db.json")," config file:"),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-json",metastring:'title="platformatic.db.json"',title:'"platformatic.db.json"'},'{\n "server": {\n "logger": {\n "level": "trace"\n }\n }\n}\n')))}p.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/0b52e73a.2cb86126.js b/assets/js/0b52e73a.2cb86126.js new file mode 100644 index 00000000000..3df82bd80cf --- /dev/null +++ b/assets/js/0b52e73a.2cb86126.js @@ -0,0 +1 @@ +"use 
strict";(self.webpackChunkplatformatic_oss_website=self.webpackChunkplatformatic_oss_website||[]).push([[34923],{3905:(e,t,r)=>{r.d(t,{Zo:()=>s,kt:()=>d});var n=r(67294);function o(e,t,r){return t in e?Object.defineProperty(e,t,{value:r,enumerable:!0,configurable:!0,writable:!0}):e[t]=r,e}function a(e,t){var r=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);t&&(n=n.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),r.push.apply(r,n)}return r}function i(e){for(var t=1;t=0||(o[r]=e[r]);return o}(e,t);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);for(n=0;n=0||Object.prototype.propertyIsEnumerable.call(e,r)&&(o[r]=e[r])}return o}var l=n.createContext({}),p=function(e){var t=n.useContext(l),r=t;return e&&(r="function"==typeof e?e(t):i(i({},t),e)),r},s=function(e){var t=p(e.components);return n.createElement(l.Provider,{value:t},e.children)},u="mdxType",m={inlineCode:"code",wrapper:function(e){var t=e.children;return n.createElement(n.Fragment,{},t)}},f=n.forwardRef((function(e,t){var r=e.components,o=e.mdxType,a=e.originalType,l=e.parentName,s=c(e,["components","mdxType","originalType","parentName"]),u=p(r),f=o,d=u["".concat(l,".").concat(f)]||u[f]||m[f]||a;return r?n.createElement(d,i(i({ref:t},s),{},{components:r})):n.createElement(d,i({ref:t},s))}));function d(e,t){var r=arguments,o=t&&t.mdxType;if("string"==typeof e||o){var a=r.length,i=new Array(a);i[0]=f;var c={};for(var l in t)hasOwnProperty.call(t,l)&&(c[l]=t[l]);c.originalType=e,c[u]="string"==typeof e?e:o,i[1]=c;for(var p=2;p{r.r(t),r.d(t,{assets:()=>l,contentTitle:()=>i,default:()=>m,frontMatter:()=>a,metadata:()=>c,toc:()=>p});var n=r(87462),o=(r(67294),r(3905));const a={},i="Platformatic Composer",c={unversionedId:"reference/composer/introduction",id:"version-0.41.2/reference/composer/introduction",title:"Platformatic Composer",description:"Platformatic Composer is an HTTP server that automatically aggregates 
multiple",source:"@site/versioned_docs/version-0.41.2/reference/composer/introduction.md",sourceDirName:"reference/composer",slug:"/reference/composer/introduction",permalink:"/docs/0.41.2/reference/composer/introduction",draft:!1,editUrl:"https://github.com/platformatic/oss/edit/main/versioned_docs/version-0.41.2/reference/composer/introduction.md",tags:[],version:"0.41.2",frontMatter:{},sidebar:"docs",previous:{title:"Platformatic CLI",permalink:"/docs/0.41.2/reference/cli"},next:{title:"Platformatic Composer",permalink:"/docs/0.41.2/reference/composer/introduction"}},l={},p=[{value:"Features",id:"features",level:2},{value:"Public beta",id:"public-beta",level:2},{value:"Standalone usage",id:"standalone-usage",level:2},{value:"Example configuration file",id:"example-configuration-file",level:2}],s={toc:p},u="wrapper";function m(e){let{components:t,...r}=e;return(0,o.kt)(u,(0,n.Z)({},s,r,{components:t,mdxType:"MDXLayout"}),(0,o.kt)("h1",{id:"platformatic-composer"},"Platformatic Composer"),(0,o.kt)("p",null,"Platformatic Composer is an HTTP server that automatically aggregates multiple\nservices APIs into a single API."),(0,o.kt)("admonition",{type:"info"},(0,o.kt)("p",{parentName:"admonition"},"Platformatic Composer is currently in ",(0,o.kt)("a",{parentName:"p",href:"#public-beta"},"public beta"),".")),(0,o.kt)("h2",{id:"features"},"Features"),(0,o.kt)("ul",null,(0,o.kt)("li",{parentName:"ul"},"Command-line interface: ",(0,o.kt)("a",{parentName:"li",href:"/docs/0.41.2/reference/cli#composer"},(0,o.kt)("inlineCode",{parentName:"a"},"platformatic composer"))),(0,o.kt)("li",{parentName:"ul"},"Automatic ",(0,o.kt)("a",{parentName:"li",href:"/docs/0.41.2/reference/composer/configuration#composer"},"OpenApi composition")),(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("a",{parentName:"li",href:"/docs/0.41.2/reference/composer/configuration#composer"},"Reverse proxy")," for composed services"),(0,o.kt)("li",{parentName:"ul"},"Add custom functionality in a 
",(0,o.kt)("a",{parentName:"li",href:"/docs/0.41.2/reference/composer/plugin"},"Fastify plugin")),(0,o.kt)("li",{parentName:"ul"},"Write plugins in JavaScript or ",(0,o.kt)("a",{parentName:"li",href:"/docs/0.41.2/reference/cli#compile"},"TypeScript"))),(0,o.kt)("h2",{id:"public-beta"},"Public beta"),(0,o.kt)("p",null,"Platformatic Composer is in public beta. You can use it in production, but it's quite\nlikely that you'll encounter significant bugs."),(0,o.kt)("p",null,"If you run into a bug or have a suggestion for improvement, please\n",(0,o.kt)("a",{parentName:"p",href:"https://github.com/platformatic/platformatic/issues/new"},"raise an issue on GitHub"),"."),(0,o.kt)("h2",{id:"standalone-usage"},"Standalone usage"),(0,o.kt)("p",null,"If you're only interested in the features available in Platformatic Composer, you can replace ",(0,o.kt)("inlineCode",{parentName:"p"},"platformatic")," with ",(0,o.kt)("inlineCode",{parentName:"p"},"@platformatic/composer")," in the ",(0,o.kt)("inlineCode",{parentName:"p"},"dependencies")," of your ",(0,o.kt)("inlineCode",{parentName:"p"},"package.json"),", so that you'll import fewer deps."),(0,o.kt)("h2",{id:"example-configuration-file"},"Example configuration file"),(0,o.kt)("p",null,"The following configuration file can be used to start a new Platformatic\nComposer project. 
For more details on the configuration file, see the\n",(0,o.kt)("a",{parentName:"p",href:"/docs/0.41.2/reference/composer/configuration"},"configuration documentation"),"."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-json"},'{\n "$schema": "https://platformatic.dev/schemas/v0.26.0/composer",\n "server": {\n "hostname": "127.0.0.1",\n "port": 0,\n "logger": {\n "level": "info"\n }\n },\n "composer": {\n "services": [\n {\n "id": "auth-service",\n "origin": "https://auth-service.com",\n "openapi": {\n "url": "/documentation/json",\n "prefix": "auth"\n }\n },\n {\n "id": "payment-service",\n "origin": "https://payment-service.com",\n "openapi": {\n "url": "/documentation/json"\n }\n }\n ],\n "refreshTimeout": 1000\n },\n "watch": true\n}\n')))}m.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/0b6177e0.918ef7a1.js b/assets/js/0b6177e0.918ef7a1.js new file mode 100644 index 00000000000..a35a03ab64c --- /dev/null +++ b/assets/js/0b6177e0.918ef7a1.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkplatformatic_oss_website=self.webpackChunkplatformatic_oss_website||[]).push([[71956],{3905:(e,t,n)=>{n.d(t,{Zo:()=>c,kt:()=>f});var a=n(67294);function i(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function r(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);t&&(a=a.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,a)}return n}function l(e){for(var t=1;t=0||(i[n]=e[n]);return i}(e,t);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);for(a=0;a=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(i[n]=e[n])}return i}var o=a.createContext({}),p=function(e){var t=a.useContext(o),n=t;return e&&(n="function"==typeof e?e(t):l(l({},t),e)),n},c=function(e){var t=p(e.components);return 
a.createElement(o.Provider,{value:t},e.children)},d="mdxType",m={inlineCode:"code",wrapper:function(e){var t=e.children;return a.createElement(a.Fragment,{},t)}},u=a.forwardRef((function(e,t){var n=e.components,i=e.mdxType,r=e.originalType,o=e.parentName,c=s(e,["components","mdxType","originalType","parentName"]),d=p(n),u=i,f=d["".concat(o,".").concat(u)]||d[u]||m[u]||r;return n?a.createElement(f,l(l({ref:t},c),{},{components:n})):a.createElement(f,l({ref:t},c))}));function f(e,t){var n=arguments,i=t&&t.mdxType;if("string"==typeof e||i){var r=n.length,l=new Array(r);l[0]=u;var s={};for(var o in t)hasOwnProperty.call(t,o)&&(s[o]=t[o]);s.originalType=e,s[d]="string"==typeof e?e:i,l[1]=s;for(var p=2;p{n.r(t),n.d(t,{assets:()=>o,contentTitle:()=>l,default:()=>m,frontMatter:()=>r,metadata:()=>s,toc:()=>p});var a=n(87462),i=(n(67294),n(3905));const r={},l="Fields",s={unversionedId:"reference/sql-mapper/entities/fields",id:"version-0.41.1/reference/sql-mapper/entities/fields",title:"Fields",description:"When Platformatic DB inspects a database's schema, it creates an object for each table that contains a mapping of their fields.",source:"@site/versioned_docs/version-0.41.1/reference/sql-mapper/entities/fields.md",sourceDirName:"reference/sql-mapper/entities",slug:"/reference/sql-mapper/entities/fields",permalink:"/docs/0.41.1/reference/sql-mapper/entities/fields",draft:!1,editUrl:"https://github.com/platformatic/oss/edit/main/versioned_docs/version-0.41.1/reference/sql-mapper/entities/fields.md",tags:[],version:"0.41.1",frontMatter:{},sidebar:"docs",previous:{title:"Introduction to Entities",permalink:"/docs/0.41.1/reference/sql-mapper/entities/introduction"},next:{title:"API",permalink:"/docs/0.41.1/reference/sql-mapper/entities/api"}},o={},p=[{value:"Fields detail",id:"fields-detail",level:2},{value:"Example",id:"example",level:2}],c={toc:p},d="wrapper";function 
m(e){let{components:t,...n}=e;return(0,i.kt)(d,(0,a.Z)({},c,n,{components:t,mdxType:"MDXLayout"}),(0,i.kt)("h1",{id:"fields"},"Fields"),(0,i.kt)("p",null,"When Platformatic DB inspects a database's schema, it creates an object for each table that contains a mapping of their fields."),(0,i.kt)("p",null,"These objects contain the following properties:"),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"singularName"),": singular entity name, based on table name. Uses ",(0,i.kt)("a",{parentName:"li",href:"https://www.npmjs.com/package/inflected"},"inflected")," under the hood."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"pluralName"),": plural entity name (i.e ",(0,i.kt)("inlineCode",{parentName:"li"},"'pages'"),")"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"primaryKey"),": the field which is identified as primary key."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"table"),": original table name"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"fields"),": an object containing all fields details. Object key is the field name."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"camelCasedFields"),": an object containing all fields details in camelcase. If you have a column named ",(0,i.kt)("inlineCode",{parentName:"li"},"user_id")," you can access it using both ",(0,i.kt)("inlineCode",{parentName:"li"},"userId")," or ",(0,i.kt)("inlineCode",{parentName:"li"},"user_id"))),(0,i.kt)("h2",{id:"fields-detail"},"Fields detail"),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"sqlType"),": The original field type. 
It may vary depending on the underlying DB Engine"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"isNullable"),": Whether the field can be ",(0,i.kt)("inlineCode",{parentName:"li"},"null")," or not"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"primaryKey"),": Whether the field is the primary key or not"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"camelcase"),": The ",(0,i.kt)("em",{parentName:"li"},"camelcased")," value of the field")),(0,i.kt)("h2",{id:"example"},"Example"),(0,i.kt)("p",null,"Given this SQL Schema (for PostgreSQL):"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-SQL"},'CREATE SEQUENCE IF NOT EXISTS pages_id_seq;\nCREATE TABLE "public"."pages" (\n "id" int4 NOT NULL DEFAULT nextval(\'pages_id_seq\'::regclass),\n "title" varchar,\n "body_content" text,\n "category_id" int4,\n PRIMARY KEY ("id")\n);\n')),(0,i.kt)("p",null,"The resulting mapping object will be:"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-js"},"{\n singularName: 'page',\n pluralName: 'pages',\n primaryKey: 'id',\n table: 'pages',\n fields: {\n id: {\n sqlType: 'int4',\n isNullable: false,\n primaryKey: true,\n camelcase: 'id'\n },\n title: {\n sqlType: 'varchar',\n isNullable: true,\n camelcase: 'title'\n },\n body_content: {\n sqlType: 'text',\n isNullable: true,\n camelcase: 'bodyContent'\n },\n category_id: {\n sqlType: 'int4',\n isNullable: true,\n foreignKey: true,\n camelcase: 'categoryId'\n }\n }\n camelCasedFields: {\n id: {\n sqlType: 'int4',\n isNullable: false,\n primaryKey: true,\n camelcase: 'id'\n },\n title: {\n sqlType: 'varchar',\n isNullable: true,\n camelcase: 'title'\n },\n bodyContent: {\n sqlType: 'text',\n isNullable: true,\n camelcase: 'bodyContent'\n },\n categoryId: {\n sqlType: 'int4',\n isNullable: true,\n foreignKey: true,\n camelcase: 'categoryId'\n }\n },\n relations: []\n}\n")))}m.isMDXComponent=!0}}]); \ No 
newline at end of file diff --git a/assets/js/0bbc8da7.e75a01de.js b/assets/js/0bbc8da7.e75a01de.js new file mode 100644 index 00000000000..66f604e5a49 --- /dev/null +++ b/assets/js/0bbc8da7.e75a01de.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkplatformatic_oss_website=self.webpackChunkplatformatic_oss_website||[]).push([[10294],{3905:(e,t,r)=>{r.d(t,{Zo:()=>p,kt:()=>d});var n=r(67294);function i(e,t,r){return t in e?Object.defineProperty(e,t,{value:r,enumerable:!0,configurable:!0,writable:!0}):e[t]=r,e}function a(e,t){var r=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);t&&(n=n.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),r.push.apply(r,n)}return r}function o(e){for(var t=1;t=0||(i[r]=e[r]);return i}(e,t);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);for(n=0;n=0||Object.prototype.propertyIsEnumerable.call(e,r)&&(i[r]=e[r])}return i}var l=n.createContext({}),s=function(e){var t=n.useContext(l),r=t;return e&&(r="function"==typeof e?e(t):o(o({},t),e)),r},p=function(e){var t=s(e.components);return n.createElement(l.Provider,{value:t},e.children)},u="mdxType",f={inlineCode:"code",wrapper:function(e){var t=e.children;return n.createElement(n.Fragment,{},t)}},m=n.forwardRef((function(e,t){var r=e.components,i=e.mdxType,a=e.originalType,l=e.parentName,p=c(e,["components","mdxType","originalType","parentName"]),u=s(r),m=i,d=u["".concat(l,".").concat(m)]||u[m]||f[m]||a;return r?n.createElement(d,o(o({ref:t},p),{},{components:r})):n.createElement(d,o({ref:t},p))}));function d(e,t){var r=arguments,i=t&&t.mdxType;if("string"==typeof e||i){var a=r.length,o=new Array(a);o[0]=m;var c={};for(var l in t)hasOwnProperty.call(t,l)&&(c[l]=t[l]);c.originalType=e,c[u]="string"==typeof e?e:i,o[1]=c;for(var s=2;s{r.r(t),r.d(t,{assets:()=>l,contentTitle:()=>o,default:()=>f,frontMatter:()=>a,metadata:()=>c,toc:()=>s});var n=r(87462),i=(r(67294),r(3905));const a={},o="Platformatic 
Service",c={unversionedId:"reference/service/introduction",id:"version-0.42.0/reference/service/introduction",title:"Platformatic Service",description:"Platformatic Service is an HTTP server that provides a developer tools for",source:"@site/versioned_docs/version-0.42.0/reference/service/introduction.md",sourceDirName:"reference/service",slug:"/reference/service/introduction",permalink:"/docs/0.42.0/reference/service/introduction",draft:!1,editUrl:"https://github.com/platformatic/oss/edit/main/versioned_docs/version-0.42.0/reference/service/introduction.md",tags:[],version:"0.42.0",frontMatter:{},sidebar:"docs",previous:{title:"Programmatic API",permalink:"/docs/0.42.0/reference/runtime/programmatic"},next:{title:"Configuration",permalink:"/docs/0.42.0/reference/service/configuration"}},l={},s=[{value:"Features",id:"features",level:2},{value:"Public beta",id:"public-beta",level:2},{value:"Standalone usage",id:"standalone-usage",level:2}],p={toc:s},u="wrapper";function f(e){let{components:t,...r}=e;return(0,i.kt)(u,(0,n.Z)({},p,r,{components:t,mdxType:"MDXLayout"}),(0,i.kt)("h1",{id:"platformatic-service"},"Platformatic Service"),(0,i.kt)("p",null,"Platformatic Service is an HTTP server that provides a developer tools for\nbuilding robust APIs with Node.js."),(0,i.kt)("p",null,"For a high level overview of how Platformatic DB works, please reference the\n",(0,i.kt)("a",{parentName:"p",href:"/docs/0.42.0/getting-started/architecture"},"Architecture")," guide."),(0,i.kt)("admonition",{type:"info"},(0,i.kt)("p",{parentName:"admonition"},"Platformatic Service is currently in ",(0,i.kt)("a",{parentName:"p",href:"#public-beta"},"public beta"),".")),(0,i.kt)("h2",{id:"features"},"Features"),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},"Command-line interface: ",(0,i.kt)("a",{parentName:"li",href:"/docs/0.42.0/reference/cli#service"},(0,i.kt)("inlineCode",{parentName:"a"},"platformatic service"))),(0,i.kt)("li",{parentName:"ul"},"Add custom functionality in a 
",(0,i.kt)("a",{parentName:"li",href:"/docs/0.42.0/reference/db/plugin"},"Fastify plugin")),(0,i.kt)("li",{parentName:"ul"},"Write plugins in JavaScript or ",(0,i.kt)("a",{parentName:"li",href:"/docs/0.42.0/reference/cli#compile"},"TypeScript")),(0,i.kt)("li",{parentName:"ul"},"Start Platformatic Service ",(0,i.kt)("a",{parentName:"li",href:"/docs/0.42.0/reference/service/programmatic"},"programmatically")," in tests or other applications"),(0,i.kt)("li",{parentName:"ul"},"Fully typed")),(0,i.kt)("h2",{id:"public-beta"},"Public beta"),(0,i.kt)("p",null,"Platformatic Service is in public beta. You can use it in production, but it's quite\nlikely that you'll encounter significant bugs."),(0,i.kt)("p",null,"If you run into a bug or have a suggestion for improvement, please\n",(0,i.kt)("a",{parentName:"p",href:"https://github.com/platformatic/platformatic/issues/new"},"raise an issue on GitHub"),". "),(0,i.kt)("h2",{id:"standalone-usage"},"Standalone usage"),(0,i.kt)("p",null,"If you're only interested in the features available in Platformatic Service, you can simply switch ",(0,i.kt)("inlineCode",{parentName:"p"},"platformatic")," with ",(0,i.kt)("inlineCode",{parentName:"p"},"@platformatic/service")," in the ",(0,i.kt)("inlineCode",{parentName:"p"},"dependencies")," of your ",(0,i.kt)("inlineCode",{parentName:"p"},"package.json"),", so that you'll only import fewer deps."),(0,i.kt)("p",null,"You can use the ",(0,i.kt)("inlineCode",{parentName:"p"},"plt-service")," command, it's the equivalent of ",(0,i.kt)("inlineCode",{parentName:"p"},"plt service"),"."))}f.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/0c64b3ca.7d53bc42.js b/assets/js/0c64b3ca.7d53bc42.js new file mode 100644 index 00000000000..8222f04d629 --- /dev/null +++ b/assets/js/0c64b3ca.7d53bc42.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkplatformatic_oss_website=self.webpackChunkplatformatic_oss_website||[]).push([[29731],{3905:(e,t,r)=>{r.d(t,{Zo:()=>u,kt:()=>g});var 
n=r(67294);function o(e,t,r){return t in e?Object.defineProperty(e,t,{value:r,enumerable:!0,configurable:!0,writable:!0}):e[t]=r,e}function a(e,t){var r=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);t&&(n=n.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),r.push.apply(r,n)}return r}function c(e){for(var t=1;t=0||(o[r]=e[r]);return o}(e,t);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);for(n=0;n=0||Object.prototype.propertyIsEnumerable.call(e,r)&&(o[r]=e[r])}return o}var l=n.createContext({}),p=function(e){var t=n.useContext(l),r=t;return e&&(r="function"==typeof e?e(t):c(c({},t),e)),r},u=function(e){var t=p(e.components);return n.createElement(l.Provider,{value:t},e.children)},s="mdxType",m={inlineCode:"code",wrapper:function(e){var t=e.children;return n.createElement(n.Fragment,{},t)}},f=n.forwardRef((function(e,t){var r=e.components,o=e.mdxType,a=e.originalType,l=e.parentName,u=i(e,["components","mdxType","originalType","parentName"]),s=p(r),f=o,g=s["".concat(l,".").concat(f)]||s[f]||m[f]||a;return r?n.createElement(g,c(c({ref:t},u),{},{components:r})):n.createElement(g,c({ref:t},u))}));function g(e,t){var r=arguments,o=t&&t.mdxType;if("string"==typeof e||o){var a=r.length,c=new Array(a);c[0]=f;var i={};for(var l in t)hasOwnProperty.call(t,l)&&(i[l]=t[l]);i.originalType=e,i[s]="string"==typeof e?e:o,c[1]=i;for(var p=2;p{r.r(t),r.d(t,{assets:()=>l,contentTitle:()=>c,default:()=>m,frontMatter:()=>a,metadata:()=>i,toc:()=>p});var n=r(87462),o=(r(67294),r(3905));const a={slug:"coming-soon",title:"Coming Soon",authors:["mcollina"]},c=void 0,i={permalink:"/blog/coming-soon",source:"@site/blog/2022-08-22-coming-soon.md",title:"Coming Soon",description:"Welcome to platformatic!",date:"2022-08-22T00:00:00.000Z",formattedDate:"August 22, 2022",tags:[],readingTime:.06,hasTruncateMarker:!1,authors:[{name:"Matteo Collina",title:"Platformatic 
founder",url:"https://github.com/mcollina",imageURL:"https://github.com/mcollina.png",key:"mcollina"}],frontMatter:{slug:"coming-soon",title:"Coming Soon",authors:["mcollina"]}},l={authorsImageUrls:[void 0]},p=[],u={toc:p},s="wrapper";function m(e){let{components:t,...r}=e;return(0,o.kt)(s,(0,n.Z)({},u,r,{components:t,mdxType:"MDXLayout"}),(0,o.kt)("p",null,"Welcome to platformatic! "),(0,o.kt)("p",null,"We are working hard to launch platformatic, stay tuned!"))}m.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/0dcbfdc9.e69d7fcb.js b/assets/js/0dcbfdc9.e69d7fcb.js new file mode 100644 index 00000000000..f507790955c --- /dev/null +++ b/assets/js/0dcbfdc9.e69d7fcb.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkplatformatic_oss_website=self.webpackChunkplatformatic_oss_website||[]).push([[90485],{3905:(e,t,n)=>{n.d(t,{Zo:()=>s,kt:()=>d});var r=n(67294);function o(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function i(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);t&&(r=r.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,r)}return n}function a(e){for(var t=1;t=0||(o[n]=e[n]);return o}(e,t);if(Object.getOwnPropertySymbols){var i=Object.getOwnPropertySymbols(e);for(r=0;r=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(o[n]=e[n])}return o}var c=r.createContext({}),p=function(e){var t=r.useContext(c),n=t;return e&&(n="function"==typeof e?e(t):a(a({},t),e)),n},s=function(e){var t=p(e.components);return r.createElement(c.Provider,{value:t},e.children)},u="mdxType",m={inlineCode:"code",wrapper:function(e){var t=e.children;return r.createElement(r.Fragment,{},t)}},f=r.forwardRef((function(e,t){var n=e.components,o=e.mdxType,i=e.originalType,c=e.parentName,s=l(e,["components","mdxType","originalType","parentName"]),u=p(n),f=o,d=u["".concat(c,".").concat(f)]||u[f]||m[f]||i;return 
n?r.createElement(d,a(a({ref:t},s),{},{components:n})):r.createElement(d,a({ref:t},s))}));function d(e,t){var n=arguments,o=t&&t.mdxType;if("string"==typeof e||o){var i=n.length,a=new Array(i);a[0]=f;var l={};for(var c in t)hasOwnProperty.call(t,c)&&(l[c]=t[c]);l.originalType=e,l[u]="string"==typeof e?e:o,a[1]=l;for(var p=2;p{n.r(t),n.d(t,{assets:()=>c,contentTitle:()=>a,default:()=>m,frontMatter:()=>i,metadata:()=>l,toc:()=>p});var r=n(87462),o=(n(67294),n(3905));const i={},a="Programmatic API",l={unversionedId:"reference/client/programmatic",id:"version-0.41.2/reference/client/programmatic",title:"Programmatic API",description:"It is possible to use the Platformatic client without the generator.",source:"@site/versioned_docs/version-0.41.2/reference/client/programmatic.md",sourceDirName:"reference/client",slug:"/reference/client/programmatic",permalink:"/docs/0.41.2/reference/client/programmatic",draft:!1,editUrl:"https://github.com/platformatic/oss/edit/main/versioned_docs/version-0.41.2/reference/client/programmatic.md",tags:[],version:"0.41.2",frontMatter:{},sidebar:"docs",previous:{title:"Platformatic Client",permalink:"/docs/0.41.2/reference/client/introduction"},next:{title:"Frontend client",permalink:"/docs/0.41.2/reference/client/frontend"}},c={},p=[{value:"OpenAPI Client",id:"openapi-client",level:2},{value:"GraphQL Client",id:"graphql-client",level:2}],s={toc:p},u="wrapper";function m(e){let{components:t,...n}=e;return(0,o.kt)(u,(0,r.Z)({},s,n,{components:t,mdxType:"MDXLayout"}),(0,o.kt)("h1",{id:"programmatic-api"},"Programmatic API"),(0,o.kt)("p",null,"It is possible to use the Platformatic client without the generator."),(0,o.kt)("h2",{id:"openapi-client"},"OpenAPI Client"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-js"},"import { buildOpenAPIClient } from '@platformatic/client'\n\nconst client = await buildOpenAPIClient({\n url: `https://yourapi.com/documentation/json`, \n // path: 'path/to/openapi.json',\n headers: 
{\n 'foo': 'bar'\n }\n})\n\nconst res = await client.yourOperationName({ foo: 'bar' })\n\nconsole.log(res)\n")),(0,o.kt)("p",null,"If you use Typescript you can take advantage of the generated types file "),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-ts"},"import { buildOpenAPIClient } from '@platformatic/client'\nimport Client from './client'\n//\n// interface Client {\n// getMovies(req: GetMoviesRequest): Promise>;\n// createMovie(req: CreateMovieRequest): Promise;\n// ...\n// }\n//\n\nconst client: Client = await buildOpenAPIClient({\n url: `https://yourapi.com/documentation/json`, \n // path: 'path/to/openapi.json',\n headers: {\n 'foo': 'bar'\n }\n})\n\nconst res = await client.getMovies()\nconsole.log(res)\n")),(0,o.kt)("h2",{id:"graphql-client"},"GraphQL Client"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-js"},"import { buildGraphQLClient } from '@platformatic/client'\n\nconst client = await buildGraphQLClient({\n url: `https://yourapi.com/graphql`,\n headers: {\n 'foo': 'bar'\n }\n})\n\nconst res = await client.graphql({\n query: `\n mutation createMovie($title: String!) 
{\n saveMovie(input: {title: $title}) {\n id\n title\n }\n }\n `,\n variables: {\n title: 'The Matrix'\n }\n})\n\nconsole.log(res)\n")))}m.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/0ddf9e7d.a51dd777.js b/assets/js/0ddf9e7d.a51dd777.js new file mode 100644 index 00000000000..0b9f09cd16e --- /dev/null +++ b/assets/js/0ddf9e7d.a51dd777.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkplatformatic_oss_website=self.webpackChunkplatformatic_oss_website||[]).push([[45491],{3905:(e,t,n)=>{n.d(t,{Zo:()=>p,kt:()=>g});var r=n(67294);function a(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function o(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);t&&(r=r.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,r)}return n}function i(e){for(var t=1;t=0||(a[n]=e[n]);return a}(e,t);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);for(r=0;r=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(a[n]=e[n])}return a}var s=r.createContext({}),l=function(e){var t=r.useContext(s),n=t;return e&&(n="function"==typeof e?e(t):i(i({},t),e)),n},p=function(e){var t=l(e.components);return r.createElement(s.Provider,{value:t},e.children)},f="mdxType",m={inlineCode:"code",wrapper:function(e){var t=e.children;return r.createElement(r.Fragment,{},t)}},u=r.forwardRef((function(e,t){var n=e.components,a=e.mdxType,o=e.originalType,s=e.parentName,p=c(e,["components","mdxType","originalType","parentName"]),f=l(n),u=a,g=f["".concat(s,".").concat(u)]||f[u]||m[u]||o;return n?r.createElement(g,i(i({ref:t},p),{},{components:n})):r.createElement(g,i({ref:t},p))}));function g(e,t){var n=arguments,a=t&&t.mdxType;if("string"==typeof e||a){var o=n.length,i=new Array(o);i[0]=u;var c={};for(var s in t)hasOwnProperty.call(t,s)&&(c[s]=t[s]);c.originalType=e,c[f]="string"==typeof e?e:a,i[1]=c;for(var 
l=2;l{n.r(t),n.d(t,{assets:()=>s,contentTitle:()=>i,default:()=>m,frontMatter:()=>o,metadata:()=>c,toc:()=>l});var r=n(87462),a=(n(67294),n(3905));const o={},i="Programmatic API",c={unversionedId:"reference/service/programmatic",id:"version-0.41.1/reference/service/programmatic",title:"Programmatic API",description:"In many cases it's useful to start Platformatic Service using an API instead of",source:"@site/versioned_docs/version-0.41.1/reference/service/programmatic.md",sourceDirName:"reference/service",slug:"/reference/service/programmatic",permalink:"/docs/0.41.1/reference/service/programmatic",draft:!1,editUrl:"https://github.com/platformatic/oss/edit/main/versioned_docs/version-0.41.1/reference/service/programmatic.md",tags:[],version:"0.41.1",frontMatter:{},sidebar:"docs",previous:{title:"Plugin",permalink:"/docs/0.41.1/reference/service/plugin"},next:{title:"Packages",permalink:"/docs/0.41.1/category/packages"}},s={},l=[{value:"Creating a reusable application on top of Platformatic Service",id:"creating-a-reusable-application-on-top-of-platformatic-service",level:2}],p={toc:l},f="wrapper";function m(e){let{components:t,...n}=e;return(0,a.kt)(f,(0,r.Z)({},p,n,{components:t,mdxType:"MDXLayout"}),(0,a.kt)("h1",{id:"programmatic-api"},"Programmatic API"),(0,a.kt)("p",null,"In many cases it's useful to start Platformatic Service using an API instead of\ncommand line, e.g. 
in tests we want to start and stop our server."),(0,a.kt)("p",null,"The ",(0,a.kt)("inlineCode",{parentName:"p"},"buildServer")," function allows that:"),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-js"},"import { buildServer } from '@platformatic/service'\n\nconst app = await buildServer('path/to/platformatic.service.json')\n\nawait app.start()\n\nconst res = await fetch(app.url)\nconsole.log(await res.json())\n\n// do something\n\nawait app.close()\n")),(0,a.kt)("p",null,"It is also possible to customize the configuration:"),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-js"},"import { buildServer } from '@platformatic/service'\n\nconst app = await buildServer({\n server: {\n hostname: '127.0.0.1',\n port: 0\n }\n})\n\nawait app.start()\n\nconst res = await fetch(app.url)\nconsole.log(await res.json())\n\n// do something\n\nawait app.close()\n")),(0,a.kt)("h2",{id:"creating-a-reusable-application-on-top-of-platformatic-service"},"Creating a reusable application on top of Platformatic Service"),(0,a.kt)("p",null,(0,a.kt)("a",{parentName:"p",href:"/docs/0.41.1/reference/db/introduction"},"Platformatic DB")," is built on top of Platformatic Serivce.\nIf you want to build a similar kind of tool, follow this example:"),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-js"},"import { buildServer, schema } from '@platformatic/service'\n\nasync function myPlugin (app, opts) {\n // app.platformatic.configManager contains an instance of the ConfigManager\n console.log(app.platformatic.configManager.current)\n\n await platformaticService(app, opts)\n}\n\n// break Fastify encapsulation\nmyPlugin[Symbol.for('skip-override')] = true\nmyPlugin.configType = 'myPlugin'\n\n// This is the schema for this reusable application configuration file,\n// customize at will but retain the base properties of the schema from\n// @platformatic/service\nmyPlugin.schema = schema\n\n// The configuration of the 
ConfigManager\nmyPlugin.configManagerConfig = {\n schema: foo.schema,\n envWhitelist: ['PORT', 'HOSTNAME'],\n allowToWatch: ['.env'],\n schemaOptions: {\n useDefaults: true,\n coerceTypes: true,\n allErrors: true,\n strict: false\n },\n async transformConfig () {\n console.log(this.current) // this is the current config\n\n // In this method you can alter the configuration before the application\n // is started. It's useful to apply some defaults that cannot be derived\n // inside the schema, such as resolving paths.\n }\n}\n\n\nconst server = await buildServer('path/to/config.json', myPlugin)\n\nawait server.start()\n\nconst res = await fetch(server.listeningOrigin)\nconsole.log(await res.json())\n\n// do something\n\nawait service.close()\n")))}m.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/0f44a6cc.de8fa4f0.js b/assets/js/0f44a6cc.de8fa4f0.js new file mode 100644 index 00000000000..c3fda54fae5 --- /dev/null +++ b/assets/js/0f44a6cc.de8fa4f0.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkplatformatic_oss_website=self.webpackChunkplatformatic_oss_website||[]).push([[68448],{3905:(e,t,n)=>{n.d(t,{Zo:()=>s,kt:()=>m});var r=n(67294);function o(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function a(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);t&&(r=r.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,r)}return n}function i(e){for(var t=1;t=0||(o[n]=e[n]);return o}(e,t);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);for(r=0;r=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(o[n]=e[n])}return o}var c=r.createContext({}),l=function(e){var t=r.useContext(c),n=t;return e&&(n="function"==typeof e?e(t):i(i({},t),e)),n},s=function(e){var t=l(e.components);return 
r.createElement(c.Provider,{value:t},e.children)},f="mdxType",u={inlineCode:"code",wrapper:function(e){var t=e.children;return r.createElement(r.Fragment,{},t)}},d=r.forwardRef((function(e,t){var n=e.components,o=e.mdxType,a=e.originalType,c=e.parentName,s=p(e,["components","mdxType","originalType","parentName"]),f=l(n),d=o,m=f["".concat(c,".").concat(d)]||f[d]||u[d]||a;return n?r.createElement(m,i(i({ref:t},s),{},{components:n})):r.createElement(m,i({ref:t},s))}));function m(e,t){var n=arguments,o=t&&t.mdxType;if("string"==typeof e||o){var a=n.length,i=new Array(a);i[0]=d;var p={};for(var c in t)hasOwnProperty.call(t,c)&&(p[c]=t[c]);p.originalType=e,p[f]="string"==typeof e?e:o,i[1]=p;for(var l=2;l{n.r(t),n.d(t,{assets:()=>c,contentTitle:()=>i,default:()=>u,frontMatter:()=>a,metadata:()=>p,toc:()=>l});var r=n(87462),o=(n(67294),n(3905));const a={},i="Introduction to the REST API",p={unversionedId:"reference/sql-openapi/introduction",id:"version-0.41.3/reference/sql-openapi/introduction",title:"Introduction to the REST API",description:"The Platformatic DB OpenAPI plugin automatically starts a REST API server (powered by Fastify) that provides CRUD (Create, Read, Update, Delete) functionality for each entity.",source:"@site/versioned_docs/version-0.41.3/reference/sql-openapi/introduction.md",sourceDirName:"reference/sql-openapi",slug:"/reference/sql-openapi/introduction",permalink:"/docs/0.41.3/reference/sql-openapi/introduction",draft:!1,editUrl:"https://github.com/platformatic/oss/edit/main/versioned_docs/version-0.41.3/reference/sql-openapi/introduction.md",tags:[],version:"0.41.3",frontMatter:{},sidebar:"docs",previous:{title:"Frontend client",permalink:"/docs/0.41.3/reference/client/frontend"},next:{title:"API",permalink:"/docs/0.41.3/reference/sql-openapi/api"}},c={},l=[{value:"Configuration",id:"configuration",level:2}],s={toc:l},f="wrapper";function 
u(e){let{components:t,...n}=e;return(0,o.kt)(f,(0,r.Z)({},s,n,{components:t,mdxType:"MDXLayout"}),(0,o.kt)("h1",{id:"introduction-to-the-rest-api"},"Introduction to the REST API"),(0,o.kt)("p",null,"The Platformatic DB OpenAPI plugin automatically starts a REST API server (powered by ",(0,o.kt)("a",{parentName:"p",href:"https://fastify.io"},"Fastify"),") that provides CRUD (",(0,o.kt)("strong",{parentName:"p"},"C"),"reate, ",(0,o.kt)("strong",{parentName:"p"},"R"),"ead, ",(0,o.kt)("strong",{parentName:"p"},"U"),"pdate, ",(0,o.kt)("strong",{parentName:"p"},"D"),"elete) functionality for each entity."),(0,o.kt)("h2",{id:"configuration"},"Configuration"),(0,o.kt)("p",null,"In the config file, under the ",(0,o.kt)("inlineCode",{parentName:"p"},'"db"')," section, the OpenAPI server is enabled by default. Although you can disable it setting the property ",(0,o.kt)("inlineCode",{parentName:"p"},"openapi")," to ",(0,o.kt)("inlineCode",{parentName:"p"},"false"),"."),(0,o.kt)("p",null,(0,o.kt)("em",{parentName:"p"},"Example")),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-json"},'{\n ...\n "db": {\n "openapi": false\n }\n}\n')),(0,o.kt)("p",null,"As Platformatic DB uses ",(0,o.kt)("a",{parentName:"p",href:"https://github.com/fastify/fastify-swagger"},(0,o.kt)("inlineCode",{parentName:"a"},"fastify-swagger"))," under the hood, the ",(0,o.kt)("inlineCode",{parentName:"p"},'"openapi"')," property can be an object that follows the ",(0,o.kt)("a",{parentName:"p",href:"https://swagger.io/specification/#oasObject"},"OpenAPI Specification Object")," format."),(0,o.kt)("p",null,"This allows you to extend the output of the Swagger UI documentation."))}u.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/0f722a5b.776a0060.js b/assets/js/0f722a5b.776a0060.js new file mode 100644 index 00000000000..0b0af43ff97 --- /dev/null +++ b/assets/js/0f722a5b.776a0060.js @@ -0,0 +1 @@ +"use 
strict";(self.webpackChunkplatformatic_oss_website=self.webpackChunkplatformatic_oss_website||[]).push([[47777],{3905:(e,t,n)=>{n.d(t,{Zo:()=>m,kt:()=>u});var a=n(67294);function i(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function r(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);t&&(a=a.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,a)}return n}function l(e){for(var t=1;t=0||(i[n]=e[n]);return i}(e,t);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);for(a=0;a=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(i[n]=e[n])}return i}var p=a.createContext({}),s=function(e){var t=a.useContext(p),n=t;return e&&(n="function"==typeof e?e(t):l(l({},t),e)),n},m=function(e){var t=s(e.components);return a.createElement(p.Provider,{value:t},e.children)},d="mdxType",k={inlineCode:"code",wrapper:function(e){var t=e.children;return a.createElement(a.Fragment,{},t)}},N=a.forwardRef((function(e,t){var n=e.components,i=e.mdxType,r=e.originalType,p=e.parentName,m=o(e,["components","mdxType","originalType","parentName"]),d=s(n),N=i,u=d["".concat(p,".").concat(N)]||d[N]||k[N]||r;return n?a.createElement(u,l(l({ref:t},m),{},{components:n})):a.createElement(u,l({ref:t},m))}));function u(e,t){var n=arguments,i=t&&t.mdxType;if("string"==typeof e||i){var r=n.length,l=new Array(r);l[0]=N;var o={};for(var p in t)hasOwnProperty.call(t,p)&&(o[p]=t[p]);o.originalType=e,o[d]="string"==typeof e?e:i,l[1]=o;for(var s=2;s{n.r(t),n.d(t,{assets:()=>p,contentTitle:()=>l,default:()=>k,frontMatter:()=>r,metadata:()=>o,toc:()=>s});var a=n(87462),i=(n(67294),n(3905));const r={},l="Configuration",o={unversionedId:"reference/db/configuration",id:"version-0.42.1/reference/db/configuration",title:"Configuration",description:"Platformatic DB is configured with a configuration file. 
It supports the use",source:"@site/versioned_docs/version-0.42.1/reference/db/configuration.md",sourceDirName:"reference/db",slug:"/reference/db/configuration",permalink:"/docs/reference/db/configuration",draft:!1,editUrl:"https://github.com/platformatic/oss/edit/main/versioned_docs/version-0.42.1/reference/db/configuration.md",tags:[],version:"0.42.1",frontMatter:{},sidebar:"docs",previous:{title:"Platformatic DB",permalink:"/docs/reference/db/introduction"},next:{title:"Migrations",permalink:"/docs/reference/db/migrations"}},p={},s=[{value:"Configuration file",id:"configuration-file",level:2},{value:"Supported formats",id:"supported-formats",level:3},{value:"Settings",id:"settings",level:2},{value:"db",id:"db",level:3},{value:"metrics",id:"metrics",level:3},{value:"migrations",id:"migrations",level:3},{value:"plugins",id:"plugins",level:3},{value:"typescript compilation options",id:"typescript-compilation-options",level:4},{value:"watch",id:"watch",level:3},{value:"server",id:"server",level:3},{value:"authorization",id:"authorization",level:3},{value:"Example",id:"example",level:4},{value:"telemetry",id:"telemetry",level:3},{value:"Environment variable placeholders",id:"environment-variable-placeholders",level:2},{value:"Example",id:"example-1",level:3},{value:"Setting environment variables",id:"setting-environment-variables",level:3},{value:"Allowed placeholder names",id:"allowed-placeholder-names",level:3},{value:"Sample Configuration",id:"sample-configuration",level:2}],m={toc:s},d="wrapper";function k(e){let{components:t,...n}=e;return(0,i.kt)(d,(0,a.Z)({},m,n,{components:t,mdxType:"MDXLayout"}),(0,i.kt)("h1",{id:"configuration"},"Configuration"),(0,i.kt)("p",null,"Platformatic DB is configured with a configuration file. 
It supports the use\nof environment variables as setting values with ",(0,i.kt)("a",{parentName:"p",href:"#configuration-placeholders"},"configuration placeholders"),"."),(0,i.kt)("h2",{id:"configuration-file"},"Configuration file"),(0,i.kt)("p",null,"If the Platformatic CLI finds a file in the current working directory matching\none of these filenames, it will automatically load it:"),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"platformatic.db.json")),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"platformatic.db.json5")),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"platformatic.db.yml")," or ",(0,i.kt)("inlineCode",{parentName:"li"},"platformatic.db.yaml")),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"platformatic.db.tml")," or ",(0,i.kt)("inlineCode",{parentName:"li"},"platformatic.db.toml"))),(0,i.kt)("p",null,"Alternatively, a ",(0,i.kt)("a",{parentName:"p",href:"/docs/reference/cli#db"},(0,i.kt)("inlineCode",{parentName:"a"},"--config")," option")," with a configuration\nfilepath can be passed to most ",(0,i.kt)("inlineCode",{parentName:"p"},"platformatic db")," CLI commands."),(0,i.kt)("p",null,"The configuration examples in this reference use JSON."),(0,i.kt)("h3",{id:"supported-formats"},"Supported 
formats"),(0,i.kt)("table",null,(0,i.kt)("thead",{parentName:"table"},(0,i.kt)("tr",{parentName:"thead"},(0,i.kt)("th",{parentName:"tr",align:"left"},"Format"),(0,i.kt)("th",{parentName:"tr",align:"left"},"Extensions"))),(0,i.kt)("tbody",{parentName:"table"},(0,i.kt)("tr",{parentName:"tbody"},(0,i.kt)("td",{parentName:"tr",align:"left"},"JSON"),(0,i.kt)("td",{parentName:"tr",align:"left"},(0,i.kt)("inlineCode",{parentName:"td"},".json"))),(0,i.kt)("tr",{parentName:"tbody"},(0,i.kt)("td",{parentName:"tr",align:"left"},"JSON5"),(0,i.kt)("td",{parentName:"tr",align:"left"},(0,i.kt)("inlineCode",{parentName:"td"},".json5"))),(0,i.kt)("tr",{parentName:"tbody"},(0,i.kt)("td",{parentName:"tr",align:"left"},"YAML"),(0,i.kt)("td",{parentName:"tr",align:"left"},(0,i.kt)("inlineCode",{parentName:"td"},".yml"),", ",(0,i.kt)("inlineCode",{parentName:"td"},".yaml"))),(0,i.kt)("tr",{parentName:"tbody"},(0,i.kt)("td",{parentName:"tr",align:"left"},"TOML"),(0,i.kt)("td",{parentName:"tr",align:"left"},(0,i.kt)("inlineCode",{parentName:"td"},".tml"))))),(0,i.kt)("p",null,"Comments are supported by the JSON5, YAML and TOML file formats."),(0,i.kt)("h2",{id:"settings"},"Settings"),(0,i.kt)("p",null,"Configuration settings are organised into the following groups:"),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("a",{parentName:"li",href:"#db"},(0,i.kt)("inlineCode",{parentName:"a"},"db"))," ",(0,i.kt)("strong",{parentName:"li"},"(required)")),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("a",{parentName:"li",href:"#metrics"},(0,i.kt)("inlineCode",{parentName:"a"},"metrics"))),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("a",{parentName:"li",href:"#migrations"},(0,i.kt)("inlineCode",{parentName:"a"},"migrations"))),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("a",{parentName:"li",href:"#plugins"},(0,i.kt)("inlineCode",{parentName:"a"},"plugins"))),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("a",{parentName:"li",href:"#server"},(0,i.kt)("inlineCode",{parentName:"a"},"server"))," 
",(0,i.kt)("strong",{parentName:"li"},"(required)")),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("a",{parentName:"li",href:"#authorization"},(0,i.kt)("inlineCode",{parentName:"a"},"authorization"))),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("a",{parentName:"li",href:"#telemetry"},(0,i.kt)("inlineCode",{parentName:"a"},"telemetry")))),(0,i.kt)("p",null,"Sensitive configuration settings, such as a database connection URL that contains\na password, should be set using ",(0,i.kt)("a",{parentName:"p",href:"#configuration-placeholders"},"configuration placeholders"),"."),(0,i.kt)("h3",{id:"db"},(0,i.kt)("inlineCode",{parentName:"h3"},"db")),(0,i.kt)("p",null,"A ",(0,i.kt)("strong",{parentName:"p"},"required")," object with the following settings:"),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("p",{parentName:"li"},(0,i.kt)("strong",{parentName:"p"},(0,i.kt)("inlineCode",{parentName:"strong"},"connectionString"))," (",(0,i.kt)("strong",{parentName:"p"},"required"),", ",(0,i.kt)("inlineCode",{parentName:"p"},"string"),") \u2014 Database connection URL."),(0,i.kt)("ul",{parentName:"li"},(0,i.kt)("li",{parentName:"ul"},"Example: ",(0,i.kt)("inlineCode",{parentName:"li"},"postgres://user:password@my-database:5432/db-name")))),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("p",{parentName:"li"},(0,i.kt)("strong",{parentName:"p"}," ",(0,i.kt)("inlineCode",{parentName:"strong"},"schema"))," (array of ",(0,i.kt)("inlineCode",{parentName:"p"},"string"),") - Currently supported only for postgres, schemas used tolook for entities. 
If not provided, the default ",(0,i.kt)("inlineCode",{parentName:"p"},"public")," schema is used."),(0,i.kt)("p",{parentName:"li"},(0,i.kt)("em",{parentName:"p"},"Examples")))),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-json"},' "db": {\n "connectionString": "(...)",\n "schema": [\n "schema1", "schema2"\n ],\n ...\n\n },\n\n')),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("p",{parentName:"li"},"Platformatic DB supports MySQL, MariaDB, PostgreSQL and SQLite.")),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("p",{parentName:"li"},(0,i.kt)("strong",{parentName:"p"},(0,i.kt)("inlineCode",{parentName:"strong"},"graphql"))," (",(0,i.kt)("inlineCode",{parentName:"p"},"boolean")," or ",(0,i.kt)("inlineCode",{parentName:"p"},"object"),", default: ",(0,i.kt)("inlineCode",{parentName:"p"},"true"),") \u2014 Controls the GraphQL API interface, with optional GraphiQL UI."),(0,i.kt)("p",{parentName:"li"},(0,i.kt)("em",{parentName:"p"},"Examples")),(0,i.kt)("p",{parentName:"li"},"Enables GraphQL support"),(0,i.kt)("pre",{parentName:"li"},(0,i.kt)("code",{parentName:"pre",className:"language-json"},'{\n "db": {\n ...\n "graphql": true\n }\n}\n')),(0,i.kt)("p",{parentName:"li"},"Enables GraphQL support with GraphiQL"),(0,i.kt)("pre",{parentName:"li"},(0,i.kt)("code",{parentName:"pre",className:"language-json"},'{\n "db": {\n ...\n "graphql": {\n "graphiql": true\n }\n }\n}\n')),(0,i.kt)("p",{parentName:"li"},"It's possible to selectively ignore entites:"),(0,i.kt)("pre",{parentName:"li"},(0,i.kt)("code",{parentName:"pre",className:"language-json"},'{\n "db": {\n ...\n "graphql": {\n "ignore": {\n "categories": true\n }\n }\n }\n}\n')),(0,i.kt)("p",{parentName:"li"},"It's possible to selectively ignore fields:"),(0,i.kt)("pre",{parentName:"li"},(0,i.kt)("code",{parentName:"pre",className:"language-json"},'{\n "db": {\n ...\n "graphql": {\n "ignore": {\n "categories": {\n "name": true\n }\n }\n }\n }\n}\n')),(0,i.kt)("p",{parentName:"li"},"It's 
possible to add a custom GraphQL schema during the startup:"),(0,i.kt)("pre",{parentName:"li"},(0,i.kt)("code",{parentName:"pre",className:"language-json"},'{\n "db": {\n ...\n "graphql": {\n "schemaPath": "path/to/schema.graphql"\n }\n }\n }\n}\n'))),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("p",{parentName:"li"},(0,i.kt)("strong",{parentName:"p"},(0,i.kt)("inlineCode",{parentName:"strong"},"openapi"))," (",(0,i.kt)("inlineCode",{parentName:"p"},"boolean")," or ",(0,i.kt)("inlineCode",{parentName:"p"},"object"),", default: ",(0,i.kt)("inlineCode",{parentName:"p"},"true"),") \u2014 Enables OpenAPI REST support."),(0,i.kt)("ul",{parentName:"li"},(0,i.kt)("li",{parentName:"ul"},"If value is an object, all ",(0,i.kt)("a",{parentName:"li",href:"https://swagger.io/specification/"},"OpenAPI v3")," allowed properties can be passed. Also a ",(0,i.kt)("inlineCode",{parentName:"li"},"prefix")," property can be passed to set the OpenAPI prefix."),(0,i.kt)("li",{parentName:"ul"},"Platformatic DB uses ",(0,i.kt)("a",{parentName:"li",href:"https://github.com/fastify/fastify-swagger"},(0,i.kt)("inlineCode",{parentName:"a"},"@fastify/swagger"))," under the hood to manage this configuration.")),(0,i.kt)("p",{parentName:"li"},(0,i.kt)("em",{parentName:"p"},"Examples")),(0,i.kt)("p",{parentName:"li"},"Enables OpenAPI"),(0,i.kt)("pre",{parentName:"li"},(0,i.kt)("code",{parentName:"pre",className:"language-json"},'{\n "db": {\n ...\n "openapi": true\n }\n}\n')),(0,i.kt)("p",{parentName:"li"},"Enables OpenAPI with prefix"),(0,i.kt)("pre",{parentName:"li"},(0,i.kt)("code",{parentName:"pre",className:"language-json"},'{\n "db": {\n ...\n "openapi": {\n "prefix": "/api"\n }\n }\n}\n')),(0,i.kt)("p",{parentName:"li"},"Enables OpenAPI with options"),(0,i.kt)("pre",{parentName:"li"},(0,i.kt)("code",{parentName:"pre",className:"language-json"},'{\n "db": {\n ...\n "openapi": {\n "info": {\n "title": "Platformatic DB",\n "description": "Exposing a SQL database as REST"\n }\n }\n 
}\n}\n')),(0,i.kt)("p",{parentName:"li"},"You can for example add the ",(0,i.kt)("inlineCode",{parentName:"p"},"security")," section, so that Swagger will allow you to add the authentication header to your requests.\nIn the following code snippet, we're adding a Bearer token in the form of a ",(0,i.kt)("a",{parentName:"p",href:"/docs/reference/db/authorization/strategies#json-web-token-jwt"},"JWT"),":"),(0,i.kt)("pre",{parentName:"li"},(0,i.kt)("code",{parentName:"pre",className:"language-json"},'{\n "db": {\n ...\n "openapi": {\n ...\n "security": [{ "bearerAuth": [] }],\n "components": {\n "securitySchemes": {\n "bearerAuth": {\n "type": "http",\n "scheme": "bearer",\n "bearerFormat": "JWT"\n }\n }\n }\n }\n }\n}\n')),(0,i.kt)("p",{parentName:"li"},"It's possible to selectively ignore entites:"),(0,i.kt)("pre",{parentName:"li"},(0,i.kt)("code",{parentName:"pre",className:"language-json"},'{\n "db": {\n ...\n "openapi": {\n "ignore": {\n "categories": true\n }\n }\n }\n}\n')),(0,i.kt)("p",{parentName:"li"},"It's possible to selectively ignore fields:"),(0,i.kt)("pre",{parentName:"li"},(0,i.kt)("code",{parentName:"pre",className:"language-json"},'{\n "db": {\n ...\n "openapi": {\n "ignore": {\n "categories": {\n "name": true\n }\n }\n }\n }\n}\n'))),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("p",{parentName:"li"},(0,i.kt)("strong",{parentName:"p"},(0,i.kt)("inlineCode",{parentName:"strong"},"ignore"))," (",(0,i.kt)("inlineCode",{parentName:"p"},"object"),") \u2014 Key/value object that defines which database tables should not be mapped as API entities."),(0,i.kt)("p",{parentName:"li"},(0,i.kt)("em",{parentName:"p"},"Examples")),(0,i.kt)("pre",{parentName:"li"},(0,i.kt)("code",{parentName:"pre",className:"language-json"},'{\n "db": {\n ...\n "ignore": {\n "versions": true // "versions" table will be not mapped with GraphQL/REST APIs\n }\n 
}\n}\n'))),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("p",{parentName:"li"},(0,i.kt)("strong",{parentName:"p"},(0,i.kt)("inlineCode",{parentName:"strong"},"events"))," (",(0,i.kt)("inlineCode",{parentName:"p"},"boolean")," or ",(0,i.kt)("inlineCode",{parentName:"p"},"object"),", default: ",(0,i.kt)("inlineCode",{parentName:"p"},"true"),") \u2014 Controls the support for events published by the SQL mapping layer.\nIf enabled, this option add support for GraphQL Subscription over WebSocket. By default it uses an in-process message broker.\nIt's possible to configure it to use Redis instead."),(0,i.kt)("p",{parentName:"li"},(0,i.kt)("em",{parentName:"p"},"Examples")),(0,i.kt)("pre",{parentName:"li"},(0,i.kt)("code",{parentName:"pre",className:"language-json"},'{\n "db": {\n ...\n "events": {\n "connectionString": "redis://:password@redishost.com:6380/"\n }\n }\n}\n'))),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("p",{parentName:"li"},(0,i.kt)("strong",{parentName:"p"},(0,i.kt)("inlineCode",{parentName:"strong"},"schemalock"))," (",(0,i.kt)("inlineCode",{parentName:"p"},"boolean")," or ",(0,i.kt)("inlineCode",{parentName:"p"},"object"),", default: ",(0,i.kt)("inlineCode",{parentName:"p"},"false"),") \u2014 Controls the caching of the database schema on disk.\nIf set to ",(0,i.kt)("inlineCode",{parentName:"p"},"true")," the database schema metadata is stored inside a ",(0,i.kt)("inlineCode",{parentName:"p"},"schema.lock")," file.\nIt's also possible to configure the location of that file by specifying a path, like so:"),(0,i.kt)("p",{parentName:"li"},(0,i.kt)("em",{parentName:"p"},"Examples")),(0,i.kt)("pre",{parentName:"li"},(0,i.kt)("code",{parentName:"pre",className:"language-json"},'{\n "db": {\n ...\n "schemalock": {\n "path": "./dbmetadata"\n }\n }\n}\n')),(0,i.kt)("p",{parentName:"li"},"Starting Platformatic DB or running a migration will automatically create the schemalock 
file."))),(0,i.kt)("h3",{id:"metrics"},(0,i.kt)("inlineCode",{parentName:"h3"},"metrics")),(0,i.kt)("p",null,"Configuration for a ",(0,i.kt)("a",{parentName:"p",href:"https://prometheus.io/"},"Prometheus")," server that will export monitoring metrics\nfor the current server instance. It uses ",(0,i.kt)("a",{parentName:"p",href:"https://github.com/SkeLLLa/fastify-metrics"},(0,i.kt)("inlineCode",{parentName:"a"},"fastify-metrics")),"\nunder the hood."),(0,i.kt)("p",null,"This setting can be a ",(0,i.kt)("inlineCode",{parentName:"p"},"boolean")," or an ",(0,i.kt)("inlineCode",{parentName:"p"},"object"),". If set to ",(0,i.kt)("inlineCode",{parentName:"p"},"true")," the Prometheus server will listen on ",(0,i.kt)("inlineCode",{parentName:"p"},"http://0.0.0.0:9090"),"."),(0,i.kt)("p",null,"Supported object properties:"),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("strong",{parentName:"li"},(0,i.kt)("inlineCode",{parentName:"strong"},"hostname"))," (",(0,i.kt)("inlineCode",{parentName:"li"},"string"),") \u2014 The hostname where Prometheus server will listen for connections."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("strong",{parentName:"li"},(0,i.kt)("inlineCode",{parentName:"strong"},"port"))," (",(0,i.kt)("inlineCode",{parentName:"li"},"number"),") \u2014 The port where Prometheus server will listen for connections."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("strong",{parentName:"li"},(0,i.kt)("inlineCode",{parentName:"strong"},"auth"))," (",(0,i.kt)("inlineCode",{parentName:"li"},"object"),") \u2014 Basic Auth configuration. 
",(0,i.kt)("strong",{parentName:"li"},(0,i.kt)("inlineCode",{parentName:"strong"},"username"))," and ",(0,i.kt)("strong",{parentName:"li"},(0,i.kt)("inlineCode",{parentName:"strong"},"password"))," are required here\n(use ",(0,i.kt)("a",{parentName:"li",href:"#environment-variables"},"environment variables"),").")),(0,i.kt)("h3",{id:"migrations"},(0,i.kt)("inlineCode",{parentName:"h3"},"migrations")),(0,i.kt)("p",null,"Configures ",(0,i.kt)("a",{parentName:"p",href:"https://github.com/rickbergfalk/postgrator"},"Postgrator")," to run migrations against the database."),(0,i.kt)("p",null,"An optional object with the following settings:"),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("strong",{parentName:"li"},(0,i.kt)("inlineCode",{parentName:"strong"},"dir"))," (",(0,i.kt)("strong",{parentName:"li"},"required"),", ",(0,i.kt)("inlineCode",{parentName:"li"},"string"),"): Relative path to the migrations directory."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("strong",{parentName:"li"},(0,i.kt)("inlineCode",{parentName:"strong"},"autoApply"))," (",(0,i.kt)("inlineCode",{parentName:"li"},"boolean"),", default: ",(0,i.kt)("inlineCode",{parentName:"li"},"false"),"): Automatically apply migrations when Platformatic DB server starts.")),(0,i.kt)("h3",{id:"plugins"},(0,i.kt)("inlineCode",{parentName:"h3"},"plugins")),(0,i.kt)("p",null,"An optional object that defines the plugins loaded by Platformatic DB."),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("strong",{parentName:"li"},(0,i.kt)("inlineCode",{parentName:"strong"},"paths"))," (",(0,i.kt)("strong",{parentName:"li"},"required"),", ",(0,i.kt)("inlineCode",{parentName:"li"},"array"),"): an array of paths (",(0,i.kt)("inlineCode",{parentName:"li"},"string"),")\nor an array of objects composed as follows,",(0,i.kt)("ul",{parentName:"li"},(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"path")," (",(0,i.kt)("inlineCode",{parentName:"li"},"string"),"): Relative path to plugin's 
entry point."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"options")," (",(0,i.kt)("inlineCode",{parentName:"li"},"object"),"): Optional plugin options."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"encapsulate")," (",(0,i.kt)("inlineCode",{parentName:"li"},"boolean"),"): if the path is a folder, it instruct Platformatic to not\n",(0,i.kt)("a",{parentName:"li",href:"https://www.fastify.io/docs/latest/Reference/Encapsulation/"},"encapsulate")," those plugins,\nallowing decorators and hooks to be shared across all routes."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"maxDepth")," (",(0,i.kt)("inlineCode",{parentName:"li"},"integer"),"): if the path is a folder, it limits the depth to load the content from."))),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("strong",{parentName:"li"},(0,i.kt)("inlineCode",{parentName:"strong"},"typescript"))," (",(0,i.kt)("inlineCode",{parentName:"li"},"boolean")," or ",(0,i.kt)("inlineCode",{parentName:"li"},"object"),"): enable TypeScript compilation. A ",(0,i.kt)("inlineCode",{parentName:"li"},"tsconfig.json")," file is required in the same folder.")),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-json"},'{\n "plugins": {\n "paths": [{\n "path": "./my-plugin.js",\n "options": {\n "foo": "bar"\n }\n }]\n }\n}\n')),(0,i.kt)("h4",{id:"typescript-compilation-options"},(0,i.kt)("inlineCode",{parentName:"h4"},"typescript")," compilation options"),(0,i.kt)("p",null,"The ",(0,i.kt)("inlineCode",{parentName:"p"},"typescript")," option can also be an object to customize the compilation. 
Here are the supported options:"),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"enabled")," (",(0,i.kt)("inlineCode",{parentName:"li"},"boolean"),"): enables compilation"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"tsConfig")," (",(0,i.kt)("inlineCode",{parentName:"li"},"string"),"): path to the ",(0,i.kt)("inlineCode",{parentName:"li"},"tsconfig.json")," file relative to the configuration"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"outDir")," (",(0,i.kt)("inlineCode",{parentName:"li"},"string"),"): the output directory of ",(0,i.kt)("inlineCode",{parentName:"li"},"tsconfig.json"),", in case ",(0,i.kt)("inlineCode",{parentName:"li"},"tsconfig.json")," is not available\nand and ",(0,i.kt)("inlineCode",{parentName:"li"},"enabled")," is set to ",(0,i.kt)("inlineCode",{parentName:"li"},"false")," (procution build)"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"flags")," (array of ",(0,i.kt)("inlineCode",{parentName:"li"},"string"),"): flags to be passed to ",(0,i.kt)("inlineCode",{parentName:"li"},"tsc"),". Overrides ",(0,i.kt)("inlineCode",{parentName:"li"},"tsConfig"),".\n")),(0,i.kt)("p",null,"Example:"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-json"},'{\n "plugins": {\n "paths": [{\n "path": "./my-plugin.js",\n "options": {\n "foo": "bar"\n }\n }],\n "typescript": {\n "enabled": false,\n "tsConfig": "./path/to/tsconfig.json",\n "outDir": "dist"\n }\n }\n}\n')),(0,i.kt)("h3",{id:"watch"},(0,i.kt)("inlineCode",{parentName:"h3"},"watch")),(0,i.kt)("p",null,"Disable watching for file changes if set to ",(0,i.kt)("inlineCode",{parentName:"p"},"false"),". 
It can also be customized with the following options:"),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("p",{parentName:"li"},(0,i.kt)("strong",{parentName:"p"},(0,i.kt)("inlineCode",{parentName:"strong"},"ignore"))," (",(0,i.kt)("inlineCode",{parentName:"p"},"string[]"),", default: ",(0,i.kt)("inlineCode",{parentName:"p"},"null"),"): List of glob patterns to ignore when watching for changes. If ",(0,i.kt)("inlineCode",{parentName:"p"},"null")," or not specified, ignore rule is not applied. Ignore option doesn't work for typescript files.")),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("p",{parentName:"li"},(0,i.kt)("strong",{parentName:"p"},(0,i.kt)("inlineCode",{parentName:"strong"},"allow"))," (",(0,i.kt)("inlineCode",{parentName:"p"},"string[]"),", default: ",(0,i.kt)("inlineCode",{parentName:"p"},"['*.js', '**/*.js']"),"): List of glob patterns to allow when watching for changes. If ",(0,i.kt)("inlineCode",{parentName:"p"},"null")," or not specified, allow rule is not applied. 
Allow option doesn't work for typescript files."),(0,i.kt)("p",{parentName:"li"},(0,i.kt)("em",{parentName:"p"},"Example")),(0,i.kt)("pre",{parentName:"li"},(0,i.kt)("code",{parentName:"pre",className:"language-json"},'{\n "watch": {\n "ignore": ["*.mjs", "**/*.mjs"],\n "allow": ["my-plugin.js", "plugins/*.js"]\n }\n}\n')))),(0,i.kt)("h3",{id:"server"},(0,i.kt)("inlineCode",{parentName:"h3"},"server")),(0,i.kt)("p",null,"A ",(0,i.kt)("strong",{parentName:"p"},"required")," object with the following settings:"),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("p",{parentName:"li"},(0,i.kt)("strong",{parentName:"p"},(0,i.kt)("inlineCode",{parentName:"strong"},"hostname"))," (",(0,i.kt)("strong",{parentName:"p"},"required"),", ",(0,i.kt)("inlineCode",{parentName:"p"},"string"),") \u2014 Hostname where Platformatic DB server will listen for connections.")),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("p",{parentName:"li"},(0,i.kt)("strong",{parentName:"p"},(0,i.kt)("inlineCode",{parentName:"strong"},"port"))," (",(0,i.kt)("strong",{parentName:"p"},"required"),", ",(0,i.kt)("inlineCode",{parentName:"p"},"number"),") \u2014 Port where Platformatic DB server will listen for connections.")),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("p",{parentName:"li"},(0,i.kt)("strong",{parentName:"p"},(0,i.kt)("inlineCode",{parentName:"strong"},"healthCheck"))," (",(0,i.kt)("inlineCode",{parentName:"p"},"boolean")," or ",(0,i.kt)("inlineCode",{parentName:"p"},"object"),") \u2014 Enables the health check endpoint."),(0,i.kt)("ul",{parentName:"li"},(0,i.kt)("li",{parentName:"ul"},"Powered by ",(0,i.kt)("a",{parentName:"li",href:"https://github.com/fastify/under-pressure"},(0,i.kt)("inlineCode",{parentName:"a"},"@fastify/under-pressure")),"."),(0,i.kt)("li",{parentName:"ul"},"The value can be an object, used to specify the interval between checks in milliseconds (default: 
",(0,i.kt)("inlineCode",{parentName:"li"},"5000"),")")),(0,i.kt)("p",{parentName:"li"},(0,i.kt)("em",{parentName:"p"},"Example")),(0,i.kt)("pre",{parentName:"li"},(0,i.kt)("code",{parentName:"pre",className:"language-json"},'{\n "server": {\n ...\n "healthCheck": {\n "interval": 2000\n }\n }\n}\n'))),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("p",{parentName:"li"},(0,i.kt)("strong",{parentName:"p"},(0,i.kt)("inlineCode",{parentName:"strong"},"cors"))," (",(0,i.kt)("inlineCode",{parentName:"p"},"object"),") \u2014 Configuration for Cross-Origin Resource Sharing (CORS) headers."),(0,i.kt)("ul",{parentName:"li"},(0,i.kt)("li",{parentName:"ul"},"All options will be passed to the ",(0,i.kt)("a",{parentName:"li",href:"https://github.com/fastify/fastify-cors"},(0,i.kt)("inlineCode",{parentName:"a"},"@fastify/cors"))," plugin. In order to specify a ",(0,i.kt)("inlineCode",{parentName:"li"},"RegExp")," object, you can pass ",(0,i.kt)("inlineCode",{parentName:"li"},"{ regexp: 'yourregexp' }"),",\nit will be automatically converted"))),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("p",{parentName:"li"},(0,i.kt)("strong",{parentName:"p"},(0,i.kt)("inlineCode",{parentName:"strong"},"https"))," (",(0,i.kt)("inlineCode",{parentName:"p"},"object"),") - Configuration for HTTPS supporting the following options."),(0,i.kt)("ul",{parentName:"li"},(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"key")," (",(0,i.kt)("strong",{parentName:"li"},"required"),", ",(0,i.kt)("inlineCode",{parentName:"li"},"string"),", ",(0,i.kt)("inlineCode",{parentName:"li"},"object"),", or ",(0,i.kt)("inlineCode",{parentName:"li"},"array"),") - If ",(0,i.kt)("inlineCode",{parentName:"li"},"key")," is a string, it specifies the private key to be used. If ",(0,i.kt)("inlineCode",{parentName:"li"},"key")," is an object, it must have a ",(0,i.kt)("inlineCode",{parentName:"li"},"path")," property specifying the private key file. 
Multiple keys are supported by passing an array of keys."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"cert")," (",(0,i.kt)("strong",{parentName:"li"},"required"),", ",(0,i.kt)("inlineCode",{parentName:"li"},"string"),", ",(0,i.kt)("inlineCode",{parentName:"li"},"object"),", or ",(0,i.kt)("inlineCode",{parentName:"li"},"array"),") - If ",(0,i.kt)("inlineCode",{parentName:"li"},"cert")," is a string, it specifies the certificate to be used. If ",(0,i.kt)("inlineCode",{parentName:"li"},"cert")," is an object, it must have a ",(0,i.kt)("inlineCode",{parentName:"li"},"path")," property specifying the certificate file. Multiple certificates are supported by passing an array of keys."))),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("p",{parentName:"li"},(0,i.kt)("strong",{parentName:"p"},(0,i.kt)("inlineCode",{parentName:"strong"},"logger"))," (",(0,i.kt)("inlineCode",{parentName:"p"},"object"),") -- the ",(0,i.kt)("a",{parentName:"p",href:"https://www.fastify.io/docs/latest/Reference/Server/#logger"},"logger configuration"),".")),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("p",{parentName:"li"},(0,i.kt)("strong",{parentName:"p"},(0,i.kt)("inlineCode",{parentName:"strong"},"pluginTimeout"))," (",(0,i.kt)("inlineCode",{parentName:"p"},"integer"),") -- the number of milliseconds to wait for a Fastify plugin to load")),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("p",{parentName:"li"},(0,i.kt)("strong",{parentName:"p"},(0,i.kt)("inlineCode",{parentName:"strong"},"bodyLimit"))," (",(0,i.kt)("inlineCode",{parentName:"p"},"integer"),") -- the maximum request body size in bytes")),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("p",{parentName:"li"},(0,i.kt)("strong",{parentName:"p"},(0,i.kt)("inlineCode",{parentName:"strong"},"maxParamLength"))," (",(0,i.kt)("inlineCode",{parentName:"p"},"integer"),") -- the maximum length of a request 
parameter")),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("p",{parentName:"li"},(0,i.kt)("strong",{parentName:"p"},(0,i.kt)("inlineCode",{parentName:"strong"},"caseSensitive"))," (",(0,i.kt)("inlineCode",{parentName:"p"},"boolean"),") -- if ",(0,i.kt)("inlineCode",{parentName:"p"},"true"),", the router will be case sensitive")),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("p",{parentName:"li"},(0,i.kt)("strong",{parentName:"p"},(0,i.kt)("inlineCode",{parentName:"strong"},"ignoreTrailingSlash"))," (",(0,i.kt)("inlineCode",{parentName:"p"},"boolean"),") -- if ",(0,i.kt)("inlineCode",{parentName:"p"},"true"),", the router will ignore the trailing slash")),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("p",{parentName:"li"},(0,i.kt)("strong",{parentName:"p"},(0,i.kt)("inlineCode",{parentName:"strong"},"ignoreTrailingSlash"))," (",(0,i.kt)("inlineCode",{parentName:"p"},"boolean"),") -- if ",(0,i.kt)("inlineCode",{parentName:"p"},"true"),", the router will ignore the trailing slash")),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("p",{parentName:"li"},(0,i.kt)("strong",{parentName:"p"},(0,i.kt)("inlineCode",{parentName:"strong"},"connectionTimeout"))," (",(0,i.kt)("inlineCode",{parentName:"p"},"integer"),") -- the milliseconds to wait for a new HTTP request")),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("p",{parentName:"li"},(0,i.kt)("strong",{parentName:"p"},(0,i.kt)("inlineCode",{parentName:"strong"},"keepAliveTimeout"))," (",(0,i.kt)("inlineCode",{parentName:"p"},"integer"),") -- the milliseconds to wait for a keep-alive HTTP request")),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("p",{parentName:"li"},(0,i.kt)("strong",{parentName:"p"},(0,i.kt)("inlineCode",{parentName:"strong"},"maxRequestsPerSocket"))," (",(0,i.kt)("inlineCode",{parentName:"p"},"integer"),") -- the maximum number of requests per socket")),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("p",{parentName:"li"},(0,i.kt)("strong",{parentName:"p"},(0,i.kt)("inlineCode",{parentName:"strong"},"forceCloseConnections"))," 
(",(0,i.kt)("inlineCode",{parentName:"p"},"boolean")," or ",(0,i.kt)("inlineCode",{parentName:"p"},'"idle"'),") -- if ",(0,i.kt)("inlineCode",{parentName:"p"},"true"),", the server will close all connections when it is closed")),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("p",{parentName:"li"},(0,i.kt)("strong",{parentName:"p"},(0,i.kt)("inlineCode",{parentName:"strong"},"requestTimeout"))," (",(0,i.kt)("inlineCode",{parentName:"p"},"integer"),") -- the milliseconds to wait for a request to be completed")),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("p",{parentName:"li"},(0,i.kt)("strong",{parentName:"p"},(0,i.kt)("inlineCode",{parentName:"strong"},"disableRequestLogging"))," (",(0,i.kt)("inlineCode",{parentName:"p"},"boolean"),") -- if ",(0,i.kt)("inlineCode",{parentName:"p"},"true"),", the request logger will be disabled")),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("p",{parentName:"li"},(0,i.kt)("strong",{parentName:"p"},(0,i.kt)("inlineCode",{parentName:"strong"},"exposeHeadRoutes"))," (",(0,i.kt)("inlineCode",{parentName:"p"},"boolean"),") -- if ",(0,i.kt)("inlineCode",{parentName:"p"},"true"),", the router will expose HEAD routes")),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("p",{parentName:"li"},(0,i.kt)("strong",{parentName:"p"},(0,i.kt)("inlineCode",{parentName:"strong"},"serializerOpts"))," (",(0,i.kt)("inlineCode",{parentName:"p"},"object"),") -- the ",(0,i.kt)("a",{parentName:"p",href:"https://www.fastify.io/docs/latest/Reference/Server/#serializeropts"},"serializer options"))),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("p",{parentName:"li"},(0,i.kt)("strong",{parentName:"p"},(0,i.kt)("inlineCode",{parentName:"strong"},"requestIdHeader"))," (",(0,i.kt)("inlineCode",{parentName:"p"},"string")," or ",(0,i.kt)("inlineCode",{parentName:"p"},"false"),") -- the name of the header that will contain the request 
id")),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("p",{parentName:"li"},(0,i.kt)("strong",{parentName:"p"},(0,i.kt)("inlineCode",{parentName:"strong"},"requestIdLogLabel"))," (",(0,i.kt)("inlineCode",{parentName:"p"},"string"),") -- Defines the label used for the request identifier when logging the request. default: ",(0,i.kt)("inlineCode",{parentName:"p"},"'reqId'"))),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("p",{parentName:"li"},(0,i.kt)("strong",{parentName:"p"},(0,i.kt)("inlineCode",{parentName:"strong"},"jsonShorthand"))," (",(0,i.kt)("inlineCode",{parentName:"p"},"boolean"),") -- default: ",(0,i.kt)("inlineCode",{parentName:"p"},"true")," -- visit ",(0,i.kt)("a",{parentName:"p",href:"https://www.fastify.io/docs/latest/Reference/Server/#jsonshorthand"},"fastify docs")," for more details")),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("p",{parentName:"li"},(0,i.kt)("strong",{parentName:"p"},(0,i.kt)("inlineCode",{parentName:"strong"},"trustProxy"))," (",(0,i.kt)("inlineCode",{parentName:"p"},"boolean")," or ",(0,i.kt)("inlineCode",{parentName:"p"},"integer")," or ",(0,i.kt)("inlineCode",{parentName:"p"},"string")," or ",(0,i.kt)("inlineCode",{parentName:"p"},"String[]"),") -- default: ",(0,i.kt)("inlineCode",{parentName:"p"},"false")," -- visit ",(0,i.kt)("a",{parentName:"p",href:"https://www.fastify.io/docs/latest/Reference/Server/#trustproxy"},"fastify docs")," for more details"))),(0,i.kt)("admonition",{type:"tip"},(0,i.kt)("p",{parentName:"admonition"},"See the ",(0,i.kt)("a",{parentName:"p",href:"https://www.fastify.io/docs/latest/Reference/Server"},"fastify docs")," for more details.")),(0,i.kt)("h3",{id:"authorization"},(0,i.kt)("inlineCode",{parentName:"h3"},"authorization")),(0,i.kt)("p",null,"An optional object with the following settings:"),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"adminSecret")," (",(0,i.kt)("inlineCode",{parentName:"li"},"string"),"): A secret that should be sent in 
an\n",(0,i.kt)("inlineCode",{parentName:"li"},"x-platformatic-admin-secret")," HTTP header when performing GraphQL/REST API\ncalls. Use an ",(0,i.kt)("a",{parentName:"li",href:"#environment-variable-placeholders"},"environment variable placeholder"),"\nto securely provide the value for this setting."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"roleKey")," (",(0,i.kt)("inlineCode",{parentName:"li"},"string"),", default: ",(0,i.kt)("inlineCode",{parentName:"li"},"X-PLATFORMATIC-ROLE"),"): The name of the key in user\nmetadata that is used to store the user's roles. See ",(0,i.kt)("a",{parentName:"li",href:"/docs/reference/db/authorization/user-roles-metadata#role-configuration"},"Role configuration"),"."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"anonymousRole")," (",(0,i.kt)("inlineCode",{parentName:"li"},"string"),", default: ",(0,i.kt)("inlineCode",{parentName:"li"},"anonymous"),"): The name of the anonymous role. See ",(0,i.kt)("a",{parentName:"li",href:"/docs/reference/db/authorization/user-roles-metadata#role-configuration"},"Role configuration"),"."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"jwt")," (",(0,i.kt)("inlineCode",{parentName:"li"},"object"),"): Configuration for the ",(0,i.kt)("a",{parentName:"li",href:"/docs/reference/db/authorization/strategies#json-web-token-jwt"},"JWT authorization strategy"),".\nAny option accepted by ",(0,i.kt)("a",{parentName:"li",href:"https://github.com/fastify/fastify-jwt"},(0,i.kt)("inlineCode",{parentName:"a"},"@fastify/jwt")),"\ncan be passed in this object.",(0,i.kt)("ul",{parentName:"li"},(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"secret")," (required, ",(0,i.kt)("inlineCode",{parentName:"li"},"string")," or ",(0,i.kt)("inlineCode",{parentName:"li"},"object"),"): The secret key that the JWT was signed with.\nSee the 
",(0,i.kt)("a",{parentName:"li",href:"https://github.com/fastify/fastify-jwt#secret-required"},(0,i.kt)("inlineCode",{parentName:"a"},"@fastify/jwt")," documentation"),"\nfor accepted string and object values. Use an ",(0,i.kt)("a",{parentName:"li",href:"#environment-variable-placeholders"},"environment variable placeholder"),"\nto securely provide the value for this setting."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"jwks")," (",(0,i.kt)("inlineCode",{parentName:"li"},"boolean")," or ",(0,i.kt)("inlineCode",{parentName:"li"},"object"),"): Configure authorization with JSON Web Key Sets (JWKS). See the ",(0,i.kt)("a",{parentName:"li",href:"/docs/reference/db/authorization/strategies#json-web-key-sets-jwks"},"JWKS documentation"),"."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"namespace")," (",(0,i.kt)("inlineCode",{parentName:"li"},"string"),"): Configure a ",(0,i.kt)("a",{parentName:"li",href:"/docs/reference/db/authorization/strategies#jwt-custom-claim-namespace"},"JWT Custom Claim Namespace"),"\nto avoid name collisions."))),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"webhook")," (",(0,i.kt)("inlineCode",{parentName:"li"},"object"),"): Configuration for the ",(0,i.kt)("a",{parentName:"li",href:"/docs/reference/db/authorization/strategies#webhook"},"Webhook authorization strategy"),".",(0,i.kt)("ul",{parentName:"li"},(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"url")," (required, ",(0,i.kt)("inlineCode",{parentName:"li"},"string"),"): Webhook URL that Platformatic DB will make a\nPOST request to."))),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"rules")," (",(0,i.kt)("inlineCode",{parentName:"li"},"array"),"): Authorization rules that describe the CRUD actions that\nusers are allowed to perform against entities. 
See ",(0,i.kt)("a",{parentName:"li",href:"/docs/reference/db/authorization/rules"},"Rules"),"\ndocumentation.")),(0,i.kt)("admonition",{type:"note"},(0,i.kt)("p",{parentName:"admonition"},"If an ",(0,i.kt)("inlineCode",{parentName:"p"},"authorization")," object is present, but no rules are specified, no CRUD\noperations are allowed unless ",(0,i.kt)("inlineCode",{parentName:"p"},"adminSecret")," is passed.")),(0,i.kt)("h4",{id:"example"},"Example"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-json",metastring:'title="platformatic.db.json"',title:'"platformatic.db.json"'},'{\n "authorization": {\n "jwt": {\n "secret": "{PLT_AUTHORIZATION_JWT_SECRET}"\n },\n "rules": [\n ...\n ]\n }\n}\n')),(0,i.kt)("h3",{id:"telemetry"},(0,i.kt)("inlineCode",{parentName:"h3"},"telemetry")),(0,i.kt)("p",null,(0,i.kt)("a",{parentName:"p",href:"https://opentelemetry.io/"},"Open Telemetry")," is optionally supported with these settings:"),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("strong",{parentName:"li"},(0,i.kt)("inlineCode",{parentName:"strong"},"serviceName"))," (",(0,i.kt)("strong",{parentName:"li"},"required"),", ",(0,i.kt)("inlineCode",{parentName:"li"},"string"),") \u2014 Name of the service as will be reported in open telemetry."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("strong",{parentName:"li"},(0,i.kt)("inlineCode",{parentName:"strong"},"version"))," (",(0,i.kt)("inlineCode",{parentName:"li"},"string"),") \u2014 Optional version (free form)"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("strong",{parentName:"li"},(0,i.kt)("inlineCode",{parentName:"strong"},"skip"))," (",(0,i.kt)("inlineCode",{parentName:"li"},"array"),"). 
Optional list of operations to skip when exporting telemetry defined ",(0,i.kt)("inlineCode",{parentName:"li"},"object")," with properties: ",(0,i.kt)("ul",{parentName:"li"},(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"method"),": GET, POST, PUT, DELETE, PATCH, HEAD, OPTIONS, TRACE"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"path"),". e.g.: ",(0,i.kt)("inlineCode",{parentName:"li"},"/documentation/json")," "))),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("strong",{parentName:"li"},(0,i.kt)("inlineCode",{parentName:"strong"},"exporter"))," (",(0,i.kt)("inlineCode",{parentName:"li"},"object")," or ",(0,i.kt)("inlineCode",{parentName:"li"},"array"),") \u2014 Exporter configuration. If not defined, the exporter defaults to ",(0,i.kt)("inlineCode",{parentName:"li"},"console"),". If an array of objects is configured, every object must be a valid exporter object. The exporter object has the following properties:",(0,i.kt)("ul",{parentName:"li"},(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("strong",{parentName:"li"},(0,i.kt)("inlineCode",{parentName:"strong"},"type"))," (",(0,i.kt)("inlineCode",{parentName:"li"},"string"),") \u2014 Exporter type. Supported values are ",(0,i.kt)("inlineCode",{parentName:"li"},"console"),", ",(0,i.kt)("inlineCode",{parentName:"li"},"otlp"),", ",(0,i.kt)("inlineCode",{parentName:"li"},"zipkin")," and ",(0,i.kt)("inlineCode",{parentName:"li"},"memory")," (default: ",(0,i.kt)("inlineCode",{parentName:"li"},"console"),"). ",(0,i.kt)("inlineCode",{parentName:"li"},"memory")," is only supported for testing purposes. 
"),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("strong",{parentName:"li"},(0,i.kt)("inlineCode",{parentName:"strong"},"options"))," (",(0,i.kt)("inlineCode",{parentName:"li"},"object"),") \u2014 These options are supported:",(0,i.kt)("ul",{parentName:"li"},(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("strong",{parentName:"li"},(0,i.kt)("inlineCode",{parentName:"strong"},"url"))," (",(0,i.kt)("inlineCode",{parentName:"li"},"string"),") \u2014 The URL to send the telemetry to. Required for ",(0,i.kt)("inlineCode",{parentName:"li"},"otlp")," exporter. This has no effect on ",(0,i.kt)("inlineCode",{parentName:"li"},"console")," and ",(0,i.kt)("inlineCode",{parentName:"li"},"memory")," exporters."),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("strong",{parentName:"li"},(0,i.kt)("inlineCode",{parentName:"strong"},"headers"))," (",(0,i.kt)("inlineCode",{parentName:"li"},"object"),") \u2014 Optional headers to send with the telemetry. This has no effect on ",(0,i.kt)("inlineCode",{parentName:"li"},"console")," and ",(0,i.kt)("inlineCode",{parentName:"li"},"memory")," exporters.")))))),(0,i.kt)("p",null,"Note that OTLP traces can be consumed by different solutions, like ",(0,i.kt)("a",{parentName:"p",href:"https://www.jaegertracing.io/"},"Jaeger"),". 
",(0,i.kt)("a",{parentName:"p",href:"https://opentelemetry.io/ecosystem/vendors/"},"Here")," the full list."),(0,i.kt)("p",null," ",(0,i.kt)("em",{parentName:"p"},"Example")),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-json"},'{\n "telemetry": {\n "serviceName": "test-service",\n "exporter": {\n "type": "otlp",\n "options": {\n "url": "http://localhost:4318/v1/traces"\n }\n }\n }\n}\n')),(0,i.kt)("h2",{id:"environment-variable-placeholders"},"Environment variable placeholders"),(0,i.kt)("p",null,"The value for any configuration setting can be replaced with an environment variable\nby adding a placeholder in the configuration file, for example ",(0,i.kt)("inlineCode",{parentName:"p"},"{PLT_SERVER_LOGGER_LEVEL}"),"."),(0,i.kt)("p",null,"All placeholders in a configuration must be available as an environment variable\nand must meet the ",(0,i.kt)("a",{parentName:"p",href:"#allowed-placeholder-names"},"allowed placeholder name")," rules."),(0,i.kt)("h3",{id:"example-1"},"Example"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-json",metastring:'title="platformatic.db.json"',title:'"platformatic.db.json"'},'{\n "db": {\n "connectionString": "{DATABASE_URL}"\n },\n "server": {\n "logger": {\n "level": "{PLT_SERVER_LOGGER_LEVEL}"\n },\n "port": "{PORT}"\n }\n}\n')),(0,i.kt)("p",null,"Platformatic will replace the placeholders in this example with the environment\nvariables of the same name."),(0,i.kt)("h3",{id:"setting-environment-variables"},"Setting environment variables"),(0,i.kt)("p",null,"If a ",(0,i.kt)("inlineCode",{parentName:"p"},".env")," file exists it will automatically be loaded by Platformatic using\n",(0,i.kt)("a",{parentName:"p",href:"https://github.com/motdotla/dotenv"},(0,i.kt)("inlineCode",{parentName:"a"},"dotenv")),". 
For example:"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-plaintext",metastring:'title=".env"',title:'".env"'},"PLT_SERVER_LOGGER_LEVEL=info\nPORT=8080\n")),(0,i.kt)("p",null,"The ",(0,i.kt)("inlineCode",{parentName:"p"},".env")," file must be located in the same folder as the Platformatic configuration\nfile or in the current working directory."),(0,i.kt)("p",null,"Environment variables can also be set directly on the commmand line, for example:"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-bash"},"PLT_SERVER_LOGGER_LEVEL=debug npx platformatic db\n")),(0,i.kt)("h3",{id:"allowed-placeholder-names"},"Allowed placeholder names"),(0,i.kt)("p",null,"Only placeholder names prefixed with ",(0,i.kt)("inlineCode",{parentName:"p"},"PLT_"),", or that are in this allow list, will be\ndynamically replaced in the configuration file:"),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"PORT")),(0,i.kt)("li",{parentName:"ul"},(0,i.kt)("inlineCode",{parentName:"li"},"DATABASE_URL"))),(0,i.kt)("p",null,"This restriction is to avoid accidentally exposing system environment variables.\nAn error will be raised by Platformatic if it finds a configuration placeholder\nthat isn't allowed."),(0,i.kt)("p",null,"The default allow list can be extended by passing a ",(0,i.kt)("inlineCode",{parentName:"p"},"--allow-env")," CLI option with a\ncomma separated list of strings, for example:"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-bash"},"npx platformatic db start --allow-env=HOST,SERVER_LOGGER_LEVEL\n# OR\nnpx platformatic start --allow-env=HOST,SERVER_LOGGER_LEVEL\n")),(0,i.kt)("p",null,"If ",(0,i.kt)("inlineCode",{parentName:"p"},"--allow-env")," is passed as an option to the CLI, it will be merged with the\ndefault allow list."),(0,i.kt)("h2",{id:"sample-configuration"},"Sample Configuration"),(0,i.kt)("p",null,"This is a bare minimum configuration for 
Platformatic DB. Uses a local ",(0,i.kt)("inlineCode",{parentName:"p"},"./db.sqlite")," SQLite database, with OpenAPI and GraphQL support."),(0,i.kt)("p",null,"Server will listen to ",(0,i.kt)("inlineCode",{parentName:"p"},"http://127.0.0.1:3042")),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-json"},'{\n "server": {\n "hostname": "127.0.0.1",\n "port": "3042"\n },\n "db": {\n "connectionString": "sqlite://./db.sqlite",\n "graphiql": true,\n "openapi": true,\n "graphql": true\n }\n}\n')))}k.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/0f99474f.3897e1da.js b/assets/js/0f99474f.3897e1da.js new file mode 100644 index 00000000000..1fe39950cb6 --- /dev/null +++ b/assets/js/0f99474f.3897e1da.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkplatformatic_oss_website=self.webpackChunkplatformatic_oss_website||[]).push([[27494],{3905:(e,t,r)=>{r.d(t,{Zo:()=>l,kt:()=>m});var n=r(67294);function o(e,t,r){return t in e?Object.defineProperty(e,t,{value:r,enumerable:!0,configurable:!0,writable:!0}):e[t]=r,e}function i(e,t){var r=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);t&&(n=n.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),r.push.apply(r,n)}return r}function c(e){for(var t=1;t=0||(o[r]=e[r]);return o}(e,t);if(Object.getOwnPropertySymbols){var i=Object.getOwnPropertySymbols(e);for(n=0;n=0||Object.prototype.propertyIsEnumerable.call(e,r)&&(o[r]=e[r])}return o}var u=n.createContext({}),p=function(e){var t=n.useContext(u),r=t;return e&&(r="function"==typeof e?e(t):c(c({},t),e)),r},l=function(e){var t=p(e.components);return n.createElement(u.Provider,{value:t},e.children)},s="mdxType",f={inlineCode:"code",wrapper:function(e){var t=e.children;return n.createElement(n.Fragment,{},t)}},b=n.forwardRef((function(e,t){var 
r=e.components,o=e.mdxType,i=e.originalType,u=e.parentName,l=a(e,["components","mdxType","originalType","parentName"]),s=p(r),b=o,m=s["".concat(u,".").concat(b)]||s[b]||f[b]||i;return r?n.createElement(m,c(c({ref:t},l),{},{components:r})):n.createElement(m,c({ref:t},l))}));function m(e,t){var r=arguments,o=t&&t.mdxType;if("string"==typeof e||o){var i=r.length,c=new Array(i);c[0]=b;var a={};for(var u in t)hasOwnProperty.call(t,u)&&(a[u]=t[u]);a.originalType=e,a[s]="string"==typeof e?e:o,c[1]=a;for(var p=2;p{r.r(t),r.d(t,{assets:()=>u,contentTitle:()=>c,default:()=>f,frontMatter:()=>i,metadata:()=>a,toc:()=>p});var n=r(87462),o=(r(67294),r(3905));const i={},c="Contributing",a={unversionedId:"contributing/contributing",id:"version-0.41.3/contributing/contributing",title:"Contributing",description:"Please refer to the CONTRIBUTING.md",source:"@site/versioned_docs/version-0.41.3/contributing/contributing.md",sourceDirName:"contributing",slug:"/contributing/",permalink:"/docs/0.41.3/contributing/",draft:!1,editUrl:"https://github.com/platformatic/oss/edit/main/versioned_docs/version-0.41.3/contributing/contributing.md",tags:[],version:"0.41.3",frontMatter:{}},u={},p=[],l={toc:p},s="wrapper";function f(e){let{components:t,...r}=e;return(0,o.kt)(s,(0,n.Z)({},l,r,{components:t,mdxType:"MDXLayout"}),(0,o.kt)("h1",{id:"contributing"},"Contributing"),(0,o.kt)("p",null,"Please refer to the ",(0,o.kt)("a",{parentName:"p",href:"https://github.com/platformatic/platformatic/blob/main/CONTRIBUTING.md"},"CONTRIBUTING.md"),"\nin the root of the repo."))}f.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/0f9d8283.4e21151f.js b/assets/js/0f9d8283.4e21151f.js new file mode 100644 index 00000000000..e5401bad60c --- /dev/null +++ b/assets/js/0f9d8283.4e21151f.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkplatformatic_oss_website=self.webpackChunkplatformatic_oss_website||[]).push([[14683],{3905:(e,t,n)=>{n.d(t,{Zo:()=>u,kt:()=>f});var r=n(67294);function 
a(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function o(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);t&&(r=r.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,r)}return n}function i(e){for(var t=1;t=0||(a[n]=e[n]);return a}(e,t);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);for(r=0;r=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(a[n]=e[n])}return a}var s=r.createContext({}),p=function(e){var t=r.useContext(s),n=t;return e&&(n="function"==typeof e?e(t):i(i({},t),e)),n},u=function(e){var t=p(e.components);return r.createElement(s.Provider,{value:t},e.children)},c="mdxType",m={inlineCode:"code",wrapper:function(e){var t=e.children;return r.createElement(r.Fragment,{},t)}},d=r.forwardRef((function(e,t){var n=e.components,a=e.mdxType,o=e.originalType,s=e.parentName,u=l(e,["components","mdxType","originalType","parentName"]),c=p(n),d=a,f=c["".concat(s,".").concat(d)]||c[d]||m[d]||o;return n?r.createElement(f,i(i({ref:t},u),{},{components:n})):r.createElement(f,i({ref:t},u))}));function f(e,t){var n=arguments,a=t&&t.mdxType;if("string"==typeof e||a){var o=n.length,i=new Array(o);i[0]=d;var l={};for(var s in t)hasOwnProperty.call(t,s)&&(l[s]=t[s]);l.originalType=e,l[c]="string"==typeof e?e:a,i[1]=l;for(var p=2;p{n.r(t),n.d(t,{assets:()=>s,contentTitle:()=>i,default:()=>m,frontMatter:()=>o,metadata:()=>l,toc:()=>p});var r=n(87462),a=(n(67294),n(3905));const o={},i="Migrations",l={unversionedId:"reference/db/migrations",id:"reference/db/migrations",title:"Migrations",description:"It uses Postgrator under the hood to run migrations. 
Please refer to the Postgrator documentation for guidance on writing migration files.",source:"@site/docs/reference/db/migrations.md",sourceDirName:"reference/db",slug:"/reference/db/migrations",permalink:"/docs/next/reference/db/migrations",draft:!1,editUrl:"https://github.com/platformatic/platformatic/edit/main/docs/reference/db/migrations.md",tags:[],version:"current",frontMatter:{},sidebar:"docs",previous:{title:"Configuration",permalink:"/docs/next/reference/db/configuration"},next:{title:"Authorization",permalink:"/docs/next/reference/db/authorization/introduction"}},s={},p=[{value:"How to run migrations",id:"how-to-run-migrations",level:2},{value:"Automatically on server start",id:"automatically-on-server-start",level:3},{value:"Manually with the CLI",id:"manually-with-the-cli",level:3}],u={toc:p},c="wrapper";function m(e){let{components:t,...n}=e;return(0,a.kt)(c,(0,r.Z)({},u,n,{components:t,mdxType:"MDXLayout"}),(0,a.kt)("h1",{id:"migrations"},"Migrations"),(0,a.kt)("p",null,"It uses ",(0,a.kt)("a",{parentName:"p",href:"https://www.npmjs.com/package/postgrator"},"Postgrator")," under the hood to run migrations. Please refer to the ",(0,a.kt)("a",{parentName:"p",href:"https://github.com/rickbergfalk/postgrator"},"Postgrator documentation")," for guidance on writing migration files."),(0,a.kt)("p",null,"In brief, you should create a file structure like this"),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre"},"migrations/\n |- 001.do.sql\n |- 001.undo.sql\n |- 002.do.sql\n |- 002.undo.sql\n |- 003.do.sql\n |- 003.undo.sql\n |- 004.do.sql\n |- 004.undo.sql\n |- ... 
and so on\n")),(0,a.kt)("p",null,"Postgrator uses a table in your schema, to store which migrations have been already processed, so that only new ones will be applied at every server start."),(0,a.kt)("p",null,"You can always rollback some migrations specifing what version you would like to rollback to."),(0,a.kt)("p",null,(0,a.kt)("em",{parentName:"p"},"Example")),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"$ platformatic db migrations apply --to 002\n")),(0,a.kt)("p",null,"Will execute ",(0,a.kt)("inlineCode",{parentName:"p"},"004.undo.sql"),", ",(0,a.kt)("inlineCode",{parentName:"p"},"003.undo.sql")," in this order. If you keep those files in migrations directory, when the server restarts it will execute ",(0,a.kt)("inlineCode",{parentName:"p"},"003.do.sql")," and ",(0,a.kt)("inlineCode",{parentName:"p"},"004.do.sql")," in this order if the ",(0,a.kt)("inlineCode",{parentName:"p"},"autoApply")," value is true, or you can run the ",(0,a.kt)("inlineCode",{parentName:"p"},"db migrations apply")," command."),(0,a.kt)("p",null,"It's also possible to rollback a single migration with ",(0,a.kt)("inlineCode",{parentName:"p"},"-r"),": "),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"$ platformatic db migrations apply -r \n")),(0,a.kt)("h2",{id:"how-to-run-migrations"},"How to run migrations"),(0,a.kt)("p",null,"There are two ways to run migrations in Platformatic DB. 
They can be processed automatically when the server starts if the ",(0,a.kt)("inlineCode",{parentName:"p"},"autoApply")," value is true, or you can just run the ",(0,a.kt)("inlineCode",{parentName:"p"},"db migrations apply")," command."),(0,a.kt)("p",null,"In both cases you have to edit your config file to tell Platformatic DB where are your migration files."),(0,a.kt)("h3",{id:"automatically-on-server-start"},"Automatically on server start"),(0,a.kt)("p",null,"To run migrations when Platformatic DB starts, you need to use the config file root property ",(0,a.kt)("inlineCode",{parentName:"p"},"migrations"),"."),(0,a.kt)("p",null,"There are two options in the ",(0,a.kt)("inlineCode",{parentName:"p"},'"migrations"')," property"),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("inlineCode",{parentName:"li"},"dir")," (",(0,a.kt)("em",{parentName:"li"},"required"),") the directory where the migration files are located. It will be relative to the config file path."),(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("inlineCode",{parentName:"li"},"autoApply")," a boolean value that tells Platformatic DB to auto-apply migrations or not (default: ",(0,a.kt)("inlineCode",{parentName:"li"},"false"),")")),(0,a.kt)("p",null,(0,a.kt)("em",{parentName:"p"},"Example")),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-json"},'{\n ...\n "migrations": {\n "dir": "./path/to/migrations/folder",\n "autoApply": false\n }\n}\n')),(0,a.kt)("h3",{id:"manually-with-the-cli"},"Manually with the CLI"),(0,a.kt)("p",null,"See documentation about ",(0,a.kt)("inlineCode",{parentName:"p"},"db migrations apply")," ",(0,a.kt)("a",{parentName:"p",href:"../cli#migrate"},"command")),(0,a.kt)("p",null,"In short:"),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},"be sure to define a correct ",(0,a.kt)("inlineCode",{parentName:"li"},"migrations.dir")," folder under the config on 
",(0,a.kt)("inlineCode",{parentName:"li"},"platformatic.db.json")),(0,a.kt)("li",{parentName:"ul"},"get the ",(0,a.kt)("inlineCode",{parentName:"li"},"MIGRATION_NUMBER")," (f.e. if the file is named ",(0,a.kt)("inlineCode",{parentName:"li"},"002.do.sql")," will be ",(0,a.kt)("inlineCode",{parentName:"li"},"002"),")"),(0,a.kt)("li",{parentName:"ul"},"run ",(0,a.kt)("inlineCode",{parentName:"li"},"npx platformatic db migrations apply --to MIGRATION_NUMBER"))))}m.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/10394567.7c121738.js b/assets/js/10394567.7c121738.js new file mode 100644 index 00000000000..5eda90dfdf2 --- /dev/null +++ b/assets/js/10394567.7c121738.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkplatformatic_oss_website=self.webpackChunkplatformatic_oss_website||[]).push([[50926],{3905:(e,n,t)=>{t.d(n,{Zo:()=>c,kt:()=>d});var r=t(67294);function a(e,n,t){return n in e?Object.defineProperty(e,n,{value:t,enumerable:!0,configurable:!0,writable:!0}):e[n]=t,e}function i(e,n){var t=Object.keys(e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);n&&(r=r.filter((function(n){return Object.getOwnPropertyDescriptor(e,n).enumerable}))),t.push.apply(t,r)}return t}function o(e){for(var n=1;n=0||(a[t]=e[t]);return a}(e,n);if(Object.getOwnPropertySymbols){var i=Object.getOwnPropertySymbols(e);for(r=0;r=0||Object.prototype.propertyIsEnumerable.call(e,t)&&(a[t]=e[t])}return a}var s=r.createContext({}),p=function(e){var n=r.useContext(s),t=n;return e&&(t="function"==typeof e?e(n):o(o({},n),e)),t},c=function(e){var n=p(e.components);return r.createElement(s.Provider,{value:n},e.children)},u="mdxType",m={inlineCode:"code",wrapper:function(e){var n=e.children;return r.createElement(r.Fragment,{},n)}},g=r.forwardRef((function(e,n){var t=e.components,a=e.mdxType,i=e.originalType,s=e.parentName,c=l(e,["components","mdxType","originalType","parentName"]),u=p(t),g=a,d=u["".concat(s,".").concat(g)]||u[g]||m[g]||i;return 
t?r.createElement(d,o(o({ref:n},c),{},{components:t})):r.createElement(d,o({ref:n},c))}));function d(e,n){var t=arguments,a=n&&n.mdxType;if("string"==typeof e||a){var i=t.length,o=new Array(i);o[0]=g;var l={};for(var s in n)hasOwnProperty.call(n,s)&&(l[s]=n[s]);l.originalType=e,l[u]="string"==typeof e?e:a,o[1]=l;for(var p=2;p{t.r(n),t.d(n,{assets:()=>s,contentTitle:()=>o,default:()=>m,frontMatter:()=>i,metadata:()=>l,toc:()=>p});var r=t(87462),a=(t(67294),t(3905));const i={},o="Mutations",l={unversionedId:"reference/sql-graphql/mutations",id:"version-0.41.3/reference/sql-graphql/mutations",title:"Mutations",description:"When the GraphQL plugin is loaded, some mutations are automatically adding to",source:"@site/versioned_docs/version-0.41.3/reference/sql-graphql/mutations.md",sourceDirName:"reference/sql-graphql",slug:"/reference/sql-graphql/mutations",permalink:"/docs/0.41.3/reference/sql-graphql/mutations",draft:!1,editUrl:"https://github.com/platformatic/oss/edit/main/versioned_docs/version-0.41.3/reference/sql-graphql/mutations.md",tags:[],version:"0.41.3",frontMatter:{},sidebar:"docs",previous:{title:"Queries",permalink:"/docs/0.41.3/reference/sql-graphql/queries"},next:{title:"Many To Many Relationship",permalink:"/docs/0.41.3/reference/sql-graphql/many-to-many"}},s={},p=[{value:"save[ENTITY]",id:"saveentity",level:2},{value:"Example",id:"example",level:3},{value:"insert[ENTITY]",id:"insertentity",level:2},{value:"Example",id:"example-1",level:3},{value:"delete[ENTITIES]",id:"deleteentities",level:2},{value:"Example",id:"example-2",level:3}],c={toc:p},u="wrapper";function m(e){let{components:n,...t}=e;return(0,a.kt)(u,(0,r.Z)({},c,t,{components:n,mdxType:"MDXLayout"}),(0,a.kt)("h1",{id:"mutations"},"Mutations"),(0,a.kt)("p",null,"When the GraphQL plugin is loaded, some mutations are automatically adding to\nthe GraphQL schema."),(0,a.kt)("h2",{id:"saveentity"},(0,a.kt)("inlineCode",{parentName:"h2"},"save[ENTITY]")),(0,a.kt)("p",null,"Saves a new entity to 
the database or updates an existing entity.\nThis actually behaves as an ",(0,a.kt)("inlineCode",{parentName:"p"},"upsert"),", allowing both behaviours depending on the presence of the primary key field."),(0,a.kt)("h3",{id:"example"},"Example"),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-js"},"'use strict'\n\nconst Fastify = require('fastify')\nconst graphqlPlugin = require('@platformatic/sql-graphql')\nconst sqlMapper = require('@platformatic/sql-mapper')\n\nasync function main() {\n const app = Fastify({\n logger: {\n level: 'info'\n }\n })\n app.register(sqlMapper, {\n connectionString: 'postgres://postgres:postgres@127.0.0.1/postgres',\n log: logger,\n })\n app.register(graphqlPlugin, {\n graphiql: true\n })\n const res = await app.inject({\n method: 'POST',\n url: '/graphql',\n body: {\n query: `\n mutation {\n savePage(input: { id: 3 title: \"Platformatic is cool!\" }) {\n id\n title\n }\n }\n `\n }\n })\n const result = await res.json()\n console.log(result.data) // { savePage: { id: '3', title: 'Platformatic is cool!' 
} }\n await app.close()\n}\n\nmain()\n")),(0,a.kt)("h2",{id:"insertentity"},(0,a.kt)("inlineCode",{parentName:"h2"},"insert[ENTITY]")),(0,a.kt)("p",null,"Inserts a new entity in the database."),(0,a.kt)("h3",{id:"example-1"},"Example"),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-js"},"'use strict'\n\nconst Fastify = require('fastify')\nconst graphqlPlugin = require('@platformatic/sql-graphql')\nconst sqlMapper = require('@platformatic/sql-mapper')\n\nasync function main() {\n const app = Fastify({\n logger: {\n level: 'info'\n }\n })\n app.register(sqlMapper, {\n connectionString: 'postgres://postgres:postgres@127.0.0.1/postgres',\n log: logger,\n })\n app.register(graphqlPlugin, {\n graphiql: true\n })\n const res = await app.inject({\n method: 'POST',\n url: '/graphql',\n body: {\n query: `\n mutation {\n savePage(input: { title: \"Platformatic is cool!\" }) {\n id\n title\n }\n }\n `\n }\n })\n const result = await res.json()\n console.log(result.data) // { savePage: { id: '4', title: 'Platformatic is cool!' 
} }\n await app.close()\n}\n\nmain()\n")),(0,a.kt)("h2",{id:"deleteentities"},(0,a.kt)("inlineCode",{parentName:"h2"},"delete[ENTITIES]")),(0,a.kt)("p",null,"Deletes one or more entities from the database, based on the ",(0,a.kt)("inlineCode",{parentName:"p"},"where")," clause\npassed as an input to the mutation."),(0,a.kt)("h3",{id:"example-2"},"Example"),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-js"},"'use strict'\n\nconst Fastify = require('fastify')\nconst graphqlPlugin = require('@platformatic/sql-graphql')\nconst sqlMapper = require('@platformatic/sql-mapper')\n\nasync function main() {\n const app = Fastify({\n logger: {\n level: 'info'\n }\n })\n app.register(sqlMapper, {\n connectionString: 'postgres://postgres:postgres@127.0.0.1/postgres',\n log: logger,\n })\n app.register(graphqlPlugin, {\n graphiql: true\n })\n const res = await app.inject({\n method: 'POST',\n url: '/graphql',\n body: {\n query: `\n mutation {\n deletePages(where: { id: { eq: \"3\" } }) {\n id\n title\n }\n }\n `\n }\n })\n const result = await res.json()\n console.log(result.data) // { deletePages: [ { id: '3', title: 'Platformatic is cool!' 
} ] }\n await app.close()\n}\n\nmain()\n")))}m.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/10e966f5.541d7376.js b/assets/js/10e966f5.541d7376.js new file mode 100644 index 00000000000..a087bdadbeb --- /dev/null +++ b/assets/js/10e966f5.541d7376.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkplatformatic_oss_website=self.webpackChunkplatformatic_oss_website||[]).push([[25512],{3905:(e,t,n)=>{n.d(t,{Zo:()=>p,kt:()=>m});var r=n(67294);function i(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function o(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);t&&(r=r.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,r)}return n}function a(e){for(var t=1;t=0||(i[n]=e[n]);return i}(e,t);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);for(r=0;r=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(i[n]=e[n])}return i}var l=r.createContext({}),c=function(e){var t=r.useContext(l),n=t;return e&&(n="function"==typeof e?e(t):a(a({},t),e)),n},p=function(e){var t=c(e.components);return r.createElement(l.Provider,{value:t},e.children)},u="mdxType",d={inlineCode:"code",wrapper:function(e){var t=e.children;return r.createElement(r.Fragment,{},t)}},f=r.forwardRef((function(e,t){var n=e.components,i=e.mdxType,o=e.originalType,l=e.parentName,p=s(e,["components","mdxType","originalType","parentName"]),u=c(n),f=i,m=u["".concat(l,".").concat(f)]||u[f]||d[f]||o;return n?r.createElement(m,a(a({ref:t},p),{},{components:n})):r.createElement(m,a({ref:t},p))}));function m(e,t){var n=arguments,i=t&&t.mdxType;if("string"==typeof e||i){var o=n.length,a=new Array(o);a[0]=f;var s={};for(var l in t)hasOwnProperty.call(t,l)&&(s[l]=t[l]);s.originalType=e,s[u]="string"==typeof e?e:i,a[1]=s;for(var 
c=2;c{n.r(t),n.d(t,{assets:()=>l,contentTitle:()=>a,default:()=>d,frontMatter:()=>o,metadata:()=>s,toc:()=>c});var r=n(87462),i=(n(67294),n(3905));const o={},a="Subscription",s={unversionedId:"reference/sql-graphql/subscriptions",id:"version-0.41.1/reference/sql-graphql/subscriptions",title:"Subscription",description:"When the GraphQL plugin is loaded, some subscriptions are automatically adding to",source:"@site/versioned_docs/version-0.41.1/reference/sql-graphql/subscriptions.md",sourceDirName:"reference/sql-graphql",slug:"/reference/sql-graphql/subscriptions",permalink:"/docs/0.41.1/reference/sql-graphql/subscriptions",draft:!1,editUrl:"https://github.com/platformatic/oss/edit/main/versioned_docs/version-0.41.1/reference/sql-graphql/subscriptions.md",tags:[],version:"0.41.1",frontMatter:{}},l={},c=[{value:"[ENTITY]Saved",id:"entitysaved",level:2},{value:"[ENTITY]Deleted",id:"entitydeleted",level:2}],p={toc:c},u="wrapper";function d(e){let{components:t,...n}=e;return(0,i.kt)(u,(0,r.Z)({},p,n,{components:t,mdxType:"MDXLayout"}),(0,i.kt)("h1",{id:"subscription"},"Subscription"),(0,i.kt)("p",null,"When the GraphQL plugin is loaded, some subscriptions are automatically adding to\nthe GraphQL schema if the ",(0,i.kt)("inlineCode",{parentName:"p"},"@platformatic/sql-events")," plugin has been previously registered."),(0,i.kt)("p",null,"It's possible to avoid creating the subscriptions for a given entity by adding the ",(0,i.kt)("inlineCode",{parentName:"p"},"subscriptionIgnore")," config,\nlike so: ",(0,i.kt)("inlineCode",{parentName:"p"},"subscriptionIgnore: ['page']"),"."),(0,i.kt)("h2",{id:"entitysaved"},(0,i.kt)("inlineCode",{parentName:"h2"},"[ENTITY]Saved")),(0,i.kt)("p",null,"Published whenever an entity is saved, e.g. 
when the mutation ",(0,i.kt)("inlineCode",{parentName:"p"},"insert[ENTITY]")," or ",(0,i.kt)("inlineCode",{parentName:"p"},"save[ENTITY]")," are called."),(0,i.kt)("h2",{id:"entitydeleted"},(0,i.kt)("inlineCode",{parentName:"h2"},"[ENTITY]Deleted")),(0,i.kt)("p",null,"Published whenever an entity is deleted, e.g. when the mutation ",(0,i.kt)("inlineCode",{parentName:"p"},"delete[ENTITY]")," is called.."))}d.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/10f0d52a.e3a82e0b.js b/assets/js/10f0d52a.e3a82e0b.js new file mode 100644 index 00000000000..01b8b9be009 --- /dev/null +++ b/assets/js/10f0d52a.e3a82e0b.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkplatformatic_oss_website=self.webpackChunkplatformatic_oss_website||[]).push([[71931],{3905:(e,n,r)=>{r.d(n,{Zo:()=>c,kt:()=>m});var t=r(67294);function i(e,n,r){return n in e?Object.defineProperty(e,n,{value:r,enumerable:!0,configurable:!0,writable:!0}):e[n]=r,e}function o(e,n){var r=Object.keys(e);if(Object.getOwnPropertySymbols){var t=Object.getOwnPropertySymbols(e);n&&(t=t.filter((function(n){return Object.getOwnPropertyDescriptor(e,n).enumerable}))),r.push.apply(r,t)}return r}function a(e){for(var n=1;n=0||(i[r]=e[r]);return i}(e,n);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);for(t=0;t=0||Object.prototype.propertyIsEnumerable.call(e,r)&&(i[r]=e[r])}return i}var p=t.createContext({}),s=function(e){var n=t.useContext(p),r=n;return e&&(r="function"==typeof e?e(n):a(a({},n),e)),r},c=function(e){var n=s(e.components);return t.createElement(p.Provider,{value:n},e.children)},f="mdxType",u={inlineCode:"code",wrapper:function(e){var n=e.children;return t.createElement(t.Fragment,{},n)}},d=t.forwardRef((function(e,n){var r=e.components,i=e.mdxType,o=e.originalType,p=e.parentName,c=l(e,["components","mdxType","originalType","parentName"]),f=s(r),d=i,m=f["".concat(p,".").concat(d)]||f[d]||u[d]||o;return 
r?t.createElement(m,a(a({ref:n},c),{},{components:r})):t.createElement(m,a({ref:n},c))}));function m(e,n){var r=arguments,i=n&&n.mdxType;if("string"==typeof e||i){var o=r.length,a=new Array(o);a[0]=d;var l={};for(var p in n)hasOwnProperty.call(n,p)&&(l[p]=n[p]);l.originalType=e,l[f]="string"==typeof e?e:i,a[1]=l;for(var s=2;s{r.r(n),r.d(n,{assets:()=>p,contentTitle:()=>a,default:()=>u,frontMatter:()=>o,metadata:()=>l,toc:()=>s});var t=r(87462),i=(r(67294),r(3905));const o={},a="Ignoring entities and fields",l={unversionedId:"reference/sql-openapi/ignore",id:"version-0.42.1/reference/sql-openapi/ignore",title:"Ignoring entities and fields",description:"@platformatic/sql-openapi allows to selectively ignore entities and fields.",source:"@site/versioned_docs/version-0.42.1/reference/sql-openapi/ignore.md",sourceDirName:"reference/sql-openapi",slug:"/reference/sql-openapi/ignore",permalink:"/docs/reference/sql-openapi/ignore",draft:!1,editUrl:"https://github.com/platformatic/oss/edit/main/versioned_docs/version-0.42.1/reference/sql-openapi/ignore.md",tags:[],version:"0.42.1",frontMatter:{},sidebar:"docs",previous:{title:"API",permalink:"/docs/reference/sql-openapi/api"},next:{title:"Introduction to the GraphQL API",permalink:"/docs/reference/sql-graphql/introduction"}},p={},s=[],c={toc:s},f="wrapper";function u(e){let{components:n,...r}=e;return(0,i.kt)(f,(0,t.Z)({},c,r,{components:n,mdxType:"MDXLayout"}),(0,i.kt)("h1",{id:"ignoring-entities-and-fields"},"Ignoring entities and fields"),(0,i.kt)("p",null,(0,i.kt)("inlineCode",{parentName:"p"},"@platformatic/sql-openapi")," allows to selectively ignore entities and fields."),(0,i.kt)("p",null,"To ignore entites:"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-javascript"},"app.register(require('@platformatic/sql-openapi'), {\n ignore: {\n categories: true\n }\n})\n")),(0,i.kt)("p",null,"To ignore individual 
fields:"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-javascript"},"app.register(require('@platformatic/sql-openapi'), {\n ignore: {\n categories: {\n name: true\n }\n }\n})\n")))}u.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/11ea3068.5fce7c6e.js b/assets/js/11ea3068.5fce7c6e.js new file mode 100644 index 00000000000..1fe3fbf6b51 --- /dev/null +++ b/assets/js/11ea3068.5fce7c6e.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkplatformatic_oss_website=self.webpackChunkplatformatic_oss_website||[]).push([[1676],{3905:(e,n,t)=>{t.d(n,{Zo:()=>c,kt:()=>y});var r=t(67294);function a(e,n,t){return n in e?Object.defineProperty(e,n,{value:t,enumerable:!0,configurable:!0,writable:!0}):e[n]=t,e}function o(e,n){var t=Object.keys(e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);n&&(r=r.filter((function(n){return Object.getOwnPropertyDescriptor(e,n).enumerable}))),t.push.apply(t,r)}return t}function i(e){for(var n=1;n=0||(a[t]=e[t]);return a}(e,n);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);for(r=0;r=0||Object.prototype.propertyIsEnumerable.call(e,t)&&(a[t]=e[t])}return a}var s=r.createContext({}),p=function(e){var n=r.useContext(s),t=n;return e&&(t="function"==typeof e?e(n):i(i({},n),e)),t},c=function(e){var n=p(e.components);return r.createElement(s.Provider,{value:n},e.children)},u="mdxType",m={inlineCode:"code",wrapper:function(e){var n=e.children;return r.createElement(r.Fragment,{},n)}},d=r.forwardRef((function(e,n){var t=e.components,a=e.mdxType,o=e.originalType,s=e.parentName,c=l(e,["components","mdxType","originalType","parentName"]),u=p(t),d=a,y=u["".concat(s,".").concat(d)]||u[d]||m[d]||o;return t?r.createElement(y,i(i({ref:n},c),{},{components:t})):r.createElement(y,i({ref:n},c))}));function y(e,n){var t=arguments,a=n&&n.mdxType;if("string"==typeof e||a){var o=t.length,i=new Array(o);i[0]=d;var l={};for(var s in 
n)hasOwnProperty.call(n,s)&&(l[s]=n[s]);l.originalType=e,l[u]="string"==typeof e?e:a,i[1]=l;for(var p=2;p{t.r(n),t.d(n,{assets:()=>s,contentTitle:()=>i,default:()=>m,frontMatter:()=>o,metadata:()=>l,toc:()=>p});var r=t(87462),a=(t(67294),t(3905));const o={},i="Many To Many Relationship",l={unversionedId:"reference/sql-graphql/many-to-many",id:"version-0.42.0/reference/sql-graphql/many-to-many",title:"Many To Many Relationship",description:"Many-to-Many relationship lets you relate each row in one table to many rows in",source:"@site/versioned_docs/version-0.42.0/reference/sql-graphql/many-to-many.md",sourceDirName:"reference/sql-graphql",slug:"/reference/sql-graphql/many-to-many",permalink:"/docs/0.42.0/reference/sql-graphql/many-to-many",draft:!1,editUrl:"https://github.com/platformatic/oss/edit/main/versioned_docs/version-0.42.0/reference/sql-graphql/many-to-many.md",tags:[],version:"0.42.0",frontMatter:{},sidebar:"docs",previous:{title:"Mutations",permalink:"/docs/0.42.0/reference/sql-graphql/mutations"},next:{title:"Ignoring types and fields",permalink:"/docs/0.42.0/reference/sql-graphql/ignore"}},s={},p=[{value:"Example",id:"example",level:2}],c={toc:p},u="wrapper";function m(e){let{components:n,...t}=e;return(0,a.kt)(u,(0,r.Z)({},c,t,{components:n,mdxType:"MDXLayout"}),(0,a.kt)("h1",{id:"many-to-many-relationship"},"Many To Many Relationship"),(0,a.kt)("p",null,"Many-to-Many relationship lets you relate each row in one table to many rows in\nanother table and vice versa. 
"),(0,a.kt)("p",null,'Many-to-many relationship are implemented in SQL via a "join table", a table whose ',(0,a.kt)("strong",{parentName:"p"},"primary key"),"\nis composed by the identifier of the two parts of the many-to-many relationship."),(0,a.kt)("p",null,"Platformatic DB fully support many-to-many relationships on all supported databases."),(0,a.kt)("h2",{id:"example"},"Example"),(0,a.kt)("p",null,"Consider the following schema (SQLite):"),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-SQL"},"CREATE TABLE pages (\n id INTEGER PRIMARY KEY,\n the_title VARCHAR(42)\n);\n\nCREATE TABLE users (\n id INTEGER PRIMARY KEY,\n username VARCHAR(255) NOT NULL\n);\n\nCREATE TABLE editors (\n page_id INTEGER NOT NULL,\n user_id INTEGER NOT NULL,\n role VARCHAR(255) NOT NULL,\n CONSTRAINT fk_editor_pages FOREIGN KEY (page_id) REFERENCES pages(id),\n CONSTRAINT fk_editor_users FOREIGN KEY (user_id) REFERENCES users(id),\n PRIMARY KEY (page_id, user_id)\n);\n")),(0,a.kt)("p",null,"The table ",(0,a.kt)("inlineCode",{parentName:"p"},"editors"),' is a "join table" between users and pages.\nGiven this schema, you could issue queries like:'),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-graphql"},"query {\n editors(orderBy: { field: role, direction: DESC }) {\n user {\n id\n username\n }\n page {\n id\n theTitle\n }\n role\n }\n}\n")),(0,a.kt)("p",null,"Mutation works exactly the same as before:"),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-graphql"},'mutation {\n saveEditor(input: { userId: "1", pageId: "1", role: "captain" }) {\n user {\n id\n username\n }\n page {\n id\n theTitle\n }\n role\n }\n}\n')))}m.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/11f0461c.64e0cb22.js b/assets/js/11f0461c.64e0cb22.js new file mode 100644 index 00000000000..484bf30f50b --- /dev/null +++ b/assets/js/11f0461c.64e0cb22.js @@ -0,0 +1 @@ +"use 
strict";(self.webpackChunkplatformatic_oss_website=self.webpackChunkplatformatic_oss_website||[]).push([[13003],{3905:(e,t,a)=>{a.d(t,{Zo:()=>d,kt:()=>f});var n=a(67294);function r(e,t,a){return t in e?Object.defineProperty(e,t,{value:a,enumerable:!0,configurable:!0,writable:!0}):e[t]=a,e}function s(e,t){var a=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);t&&(n=n.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),a.push.apply(a,n)}return a}function o(e){for(var t=1;t=0||(r[a]=e[a]);return r}(e,t);if(Object.getOwnPropertySymbols){var s=Object.getOwnPropertySymbols(e);for(n=0;n=0||Object.prototype.propertyIsEnumerable.call(e,a)&&(r[a]=e[a])}return r}var l=n.createContext({}),c=function(e){var t=n.useContext(l),a=t;return e&&(a="function"==typeof e?e(t):o(o({},t),e)),a},d=function(e){var t=c(e.components);return n.createElement(l.Provider,{value:t},e.children)},p="mdxType",u={inlineCode:"code",wrapper:function(e){var t=e.children;return n.createElement(n.Fragment,{},t)}},m=n.forwardRef((function(e,t){var a=e.components,r=e.mdxType,s=e.originalType,l=e.parentName,d=i(e,["components","mdxType","originalType","parentName"]),p=c(a),m=r,f=p["".concat(l,".").concat(m)]||p[m]||u[m]||s;return a?n.createElement(f,o(o({ref:t},d),{},{components:a})):n.createElement(f,o({ref:t},d))}));function f(e,t){var a=arguments,r=t&&t.mdxType;if("string"==typeof e||r){var s=a.length,o=new Array(s);o[0]=m;var i={};for(var l in t)hasOwnProperty.call(t,l)&&(i[l]=t[l]);i.originalType=e,i[p]="string"==typeof e?e:r,o[1]=i;for(var c=2;c{a.r(t),a.d(t,{assets:()=>l,contentTitle:()=>o,default:()=>u,frontMatter:()=>s,metadata:()=>i,toc:()=>c});var n=a(87462),r=(a(67294),a(3905));const s={},o="Seed a Database",i={unversionedId:"guides/seed-a-database",id:"version-0.41.3/guides/seed-a-database",title:"Seed a Database",description:"A database is as useful as the data that it contains: a fresh, empty 
database",source:"@site/versioned_docs/version-0.41.3/guides/seed-a-database.md",sourceDirName:"guides",slug:"/guides/seed-a-database",permalink:"/docs/0.41.3/guides/seed-a-database",draft:!1,editUrl:"https://github.com/platformatic/oss/edit/main/versioned_docs/version-0.41.3/guides/seed-a-database.md",tags:[],version:"0.41.3",frontMatter:{},sidebar:"docs",previous:{title:"Advanced Fly.io Deployment",permalink:"/docs/0.41.3/guides/deployment/advanced-fly-io-deployment"},next:{title:"Add Custom Functionality",permalink:"/docs/0.41.3/guides/add-custom-functionality/introduction"}},l={},c=[{value:"Example",id:"example",level:2}],d={toc:c},p="wrapper";function u(e){let{components:t,...a}=e;return(0,r.kt)(p,(0,n.Z)({},d,a,{components:t,mdxType:"MDXLayout"}),(0,r.kt)("h1",{id:"seed-a-database"},"Seed a Database"),(0,r.kt)("p",null,"A database is as useful as the data that it contains: a fresh, empty database\nisn't always the best starting point. We can add a few rows from our migrations\nusing SQL, but we might need to use JavaScript from time to time."),(0,r.kt)("p",null,"The ",(0,r.kt)("a",{parentName:"p",href:"/docs/0.41.3/reference/cli#seed"},"platformatic db seed"),' command allows us to run a\nscript that will populate \u2014 or "seed" \u2014 our database.'),(0,r.kt)("h2",{id:"example"},"Example"),(0,r.kt)("p",null,"Our seed script should export a ",(0,r.kt)("inlineCode",{parentName:"p"},"Function")," that accepts an argument:\nan instance of ",(0,r.kt)("a",{parentName:"p",href:"/docs/0.41.3/reference/sql-mapper/introduction"},(0,r.kt)("inlineCode",{parentName:"a"},"@platformatic/sql-mapper")),"."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-javascript",metastring:'title="seed.js"',title:'"seed.js"'},"'use strict'\n\nmodule.exports = async function ({ entities, db, sql }) {\n await entities.graph.save({ input: { name: 'Hello' } })\n await db.query(sql`\n INSERT INTO graphs (name) VALUES ('Hello 2');\n `)\n}\n")),(0,r.kt)("p",null,"We 
can then run the seed script with the Platformatic CLI:"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"npx platformatic db seed seed.js\n")))}u.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/11ff76f1.6db14211.js b/assets/js/11ff76f1.6db14211.js new file mode 100644 index 00000000000..c721a1c582a --- /dev/null +++ b/assets/js/11ff76f1.6db14211.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkplatformatic_oss_website=self.webpackChunkplatformatic_oss_website||[]).push([[50733],{3905:(e,t,n)=>{n.d(t,{Zo:()=>d,kt:()=>g});var a=n(67294);function o(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function r(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);t&&(a=a.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,a)}return n}function i(e){for(var t=1;t=0||(o[n]=e[n]);return o}(e,t);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);for(a=0;a=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(o[n]=e[n])}return o}var p=a.createContext({}),s=function(e){var t=a.useContext(p),n=t;return e&&(n="function"==typeof e?e(t):i(i({},t),e)),n},d=function(e){var t=s(e.components);return a.createElement(p.Provider,{value:t},e.children)},c="mdxType",u={inlineCode:"code",wrapper:function(e){var t=e.children;return a.createElement(a.Fragment,{},t)}},m=a.forwardRef((function(e,t){var n=e.components,o=e.mdxType,r=e.originalType,p=e.parentName,d=l(e,["components","mdxType","originalType","parentName"]),c=s(n),m=o,g=c["".concat(p,".").concat(m)]||c[m]||u[m]||r;return n?a.createElement(g,i(i({ref:t},d),{},{components:n})):a.createElement(g,i({ref:t},d))}));function g(e,t){var n=arguments,o=t&&t.mdxType;if("string"==typeof e||o){var r=n.length,i=new Array(r);i[0]=m;var l={};for(var p in 
t)hasOwnProperty.call(t,p)&&(l[p]=t[p]);l.originalType=e,l[c]="string"==typeof e?e:o,i[1]=l;for(var s=2;s{n.r(t),n.d(t,{assets:()=>p,contentTitle:()=>i,default:()=>u,frontMatter:()=>r,metadata:()=>l,toc:()=>s});var a=n(87462),o=(n(67294),n(3905));const r={},i="Deploy to Fly.io with SQLite",l={unversionedId:"guides/deployment/deploy-to-fly-io-with-sqlite",id:"version-0.41.1/guides/deployment/deploy-to-fly-io-with-sqlite",title:"Deploy to Fly.io with SQLite",description:"To follow this how-to guide, you'll first need to install the Fly CLI and create",source:"@site/versioned_docs/version-0.41.1/guides/deployment/deploy-to-fly-io-with-sqlite.md",sourceDirName:"guides/deployment",slug:"/guides/deployment/deploy-to-fly-io-with-sqlite",permalink:"/docs/0.41.1/guides/deployment/deploy-to-fly-io-with-sqlite",draft:!1,editUrl:"https://github.com/platformatic/oss/edit/main/versioned_docs/version-0.41.1/guides/deployment/deploy-to-fly-io-with-sqlite.md",tags:[],version:"0.41.1",frontMatter:{},sidebar:"docs",previous:{title:"Deployment",permalink:"/docs/0.41.1/guides/deployment/"},next:{title:"Advanced Fly.io Deployment",permalink:"/docs/0.41.1/guides/deployment/advanced-fly-io-deployment"}},p={},s=[{value:"Explicit builder",id:"explicit-builder",level:2},{value:"Database storage",id:"database-storage",level:2},{value:"Configure server",id:"configure-server",level:2},{value:"Configure environment",id:"configure-environment",level:2},{value:"Deploy application",id:"deploy-application",level:2}],d={toc:s},c="wrapper";function u(e){let{components:t,...n}=e;return(0,o.kt)(c,(0,a.Z)({},d,n,{components:t,mdxType:"MDXLayout"}),(0,o.kt)("h1",{id:"deploy-to-flyio-with-sqlite"},"Deploy to Fly.io with SQLite"),(0,o.kt)("admonition",{type:"note"},(0,o.kt)("p",{parentName:"admonition"},"To follow this how-to guide, you'll first need to install the Fly CLI and create\nan account by ",(0,o.kt)("a",{parentName:"p",href:"https://fly.io/docs/hands-on/"},"following this official guide"),".\nYou 
will also need an existing Platformatic DB project, please check out our\ngetting started guide if needed.")),(0,o.kt)("p",null,"Navigate to your Platformatic DB project in the terminal on your local machine.\nRun ",(0,o.kt)("inlineCode",{parentName:"p"},"fly launch"),' and follow the prompts. When it asks if you want to deploy\nnow, say "no" as there are a few things that you\'ll need to configure first.'),(0,o.kt)("p",null,"You can also create the fly application with one line. This will create your\napplication in London (",(0,o.kt)("inlineCode",{parentName:"p"},"lhr"),"):"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-sh"},"fly launch --no-deploy --generate-name --region lhr --org personal --path .\n")),(0,o.kt)("p",null,"The ",(0,o.kt)("inlineCode",{parentName:"p"},"fly")," CLI should have created a ",(0,o.kt)("strong",{parentName:"p"},"fly.toml")," file in your project\ndirectory."),(0,o.kt)("h2",{id:"explicit-builder"},"Explicit builder"),(0,o.kt)("p",null,"The ",(0,o.kt)("strong",{parentName:"p"},"fly.toml")," file may be missing an explicit builder setting. To have\nconsistent builds, it is best to add a ",(0,o.kt)("inlineCode",{parentName:"p"},"build")," section:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-toml"},'[build]\n builder = "heroku/buildpacks:20"\n')),(0,o.kt)("h2",{id:"database-storage"},"Database storage"),(0,o.kt)("p",null,"Create a volume for database storage, naming it ",(0,o.kt)("inlineCode",{parentName:"p"},"data"),":"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"fly volumes create data\n")),(0,o.kt)("p",null,"This will create storage in the same region as the application. The volume\ndefaults to 3GB size, use ",(0,o.kt)("inlineCode",{parentName:"p"},"-s")," to change the size. 
For example, ",(0,o.kt)("inlineCode",{parentName:"p"},"-s 10")," is 10GB."),(0,o.kt)("p",null,"Add a ",(0,o.kt)("inlineCode",{parentName:"p"},"mounts")," section in ",(0,o.kt)("strong",{parentName:"p"},"fly.toml"),":"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-toml"},'[mounts]\n source = "data"\n destination = "/app/.platformatic/data"\n')),(0,o.kt)("p",null,"Create a directory in your project where your SQLite database will be created:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"mkdir -p .platformatic/data\n\ntouch .platformatic/data/.gitkeep\n")),(0,o.kt)("p",null,"The ",(0,o.kt)("inlineCode",{parentName:"p"},".gitkeep")," file ensures that this directory will always be created when\nyour application is deployed."),(0,o.kt)("p",null,"You should also ensure that your SQLite database is ignored by Git. This helps\navoid inconsistencies when your application is deployed:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},'echo "*.db" >> .gitignore\n')),(0,o.kt)("p",null,"The command above assumes that your SQLite database file ends with the extension\n",(0,o.kt)("inlineCode",{parentName:"p"},".db")," \u2014 if the extension is different then you must change the command to match."),(0,o.kt)("p",null,"Change the connection string to an environment variable and make sure that\nmigrations are ",(0,o.kt)("inlineCode",{parentName:"p"},"autoApply"),"ing (for ",(0,o.kt)("inlineCode",{parentName:"p"},"platformatic@^0.4.0"),") in ",(0,o.kt)("strong",{parentName:"p"},"platformatic.db.json"),":"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-json"},'{\n "db": {\n "connectionString": "{DATABASE_URL}"\n },\n "migrations": {\n "dir": "./migrations",\n "autoApply": true\n }\n}\n')),(0,o.kt)("h2",{id:"configure-server"},"Configure server"),(0,o.kt)("p",null,"Make sure that your ",(0,o.kt)("strong",{parentName:"p"},"platformatic.db.json")," uses 
environment variables\nfor the server section:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-json"},'{\n "server": {\n "logger": {\n "level": "{PLT_SERVER_LOGGER_LEVEL}"\n },\n "hostname": "{PLT_SERVER_HOSTNAME}",\n "port": "{PORT}"\n }\n}\n')),(0,o.kt)("h2",{id:"configure-environment"},"Configure environment"),(0,o.kt)("p",null,"Start with your local environment, create a ",(0,o.kt)("strong",{parentName:"p"},".env")," file and put the following:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-sh"},"PORT=3042\nPLT_SERVER_HOSTNAME=127.0.0.1\nPLT_SERVER_LOGGER_LEVEL=debug\nDATABASE_URL=sqlite://.platformatic/data/movie-quotes.db\n")),(0,o.kt)("p",null,"Avoid accidental leaks by ignoring your ",(0,o.kt)("strong",{parentName:"p"},".env")," file:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},'echo ".env" >> .gitignore\n')),(0,o.kt)("p",null,"This same configuration needs to added to ",(0,o.kt)("strong",{parentName:"p"},"fly.toml"),":"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-toml"},'[env]\n PORT = 8080\n PLT_SERVER_HOSTNAME = "0.0.0.0"\n PLT_SERVER_LOGGER_LEVEL = "info"\n DATABASE_URL = "sqlite:///app/.platformatic/data/movie-quotes.db"\n')),(0,o.kt)("h2",{id:"deploy-application"},"Deploy application"),(0,o.kt)("p",null,"A valid ",(0,o.kt)("strong",{parentName:"p"},"package.json")," will be needed so if you do not have one, generate one\nby running ",(0,o.kt)("inlineCode",{parentName:"p"},"npm init"),"."),(0,o.kt)("p",null,"In your ",(0,o.kt)("strong",{parentName:"p"},"package.json"),", make sure there is a ",(0,o.kt)("inlineCode",{parentName:"p"},"start")," script to run your\napplication:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-json"},'{\n "scripts": {\n "start": "platformatic db"\n }\n}\n')),(0,o.kt)("p",null,"Before deploying, make sure a ",(0,o.kt)("strong",{parentName:"p"},".dockerignore")," file is 
created:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-sh"},"cp .gitignore .dockerignore\n")),(0,o.kt)("p",null,"Finally, deploy the application to Fly by running:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-sh"},"fly deploy\n")))}u.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/12584dad.41ab13f9.js b/assets/js/12584dad.41ab13f9.js new file mode 100644 index 00000000000..e8bcb2ef82a --- /dev/null +++ b/assets/js/12584dad.41ab13f9.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkplatformatic_oss_website=self.webpackChunkplatformatic_oss_website||[]).push([[10422],{3905:(e,r,t)=>{t.d(r,{Zo:()=>c,kt:()=>d});var n=t(67294);function o(e,r,t){return r in e?Object.defineProperty(e,r,{value:t,enumerable:!0,configurable:!0,writable:!0}):e[r]=t,e}function a(e,r){var t=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);r&&(n=n.filter((function(r){return Object.getOwnPropertyDescriptor(e,r).enumerable}))),t.push.apply(t,n)}return t}function i(e){for(var r=1;r=0||(o[t]=e[t]);return o}(e,r);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);for(n=0;n=0||Object.prototype.propertyIsEnumerable.call(e,t)&&(o[t]=e[t])}return o}var p=n.createContext({}),s=function(e){var r=n.useContext(p),t=r;return e&&(t="function"==typeof e?e(r):i(i({},r),e)),t},c=function(e){var r=s(e.components);return n.createElement(p.Provider,{value:r},e.children)},f="mdxType",u={inlineCode:"code",wrapper:function(e){var r=e.children;return n.createElement(n.Fragment,{},r)}},g=n.forwardRef((function(e,r){var t=e.components,o=e.mdxType,a=e.originalType,p=e.parentName,c=l(e,["components","mdxType","originalType","parentName"]),f=s(t),g=o,d=f["".concat(p,".").concat(g)]||f[g]||u[g]||a;return t?n.createElement(d,i(i({ref:r},c),{},{components:t})):n.createElement(d,i({ref:r},c))}));function d(e,r){var t=arguments,o=r&&r.mdxType;if("string"==typeof e||o){var a=t.length,i=new 
Array(a);i[0]=g;var l={};for(var p in r)hasOwnProperty.call(r,p)&&(l[p]=r[p]);l.originalType=e,l[f]="string"==typeof e?e:o,i[1]=l;for(var s=2;s{t.r(r),t.d(r,{assets:()=>p,contentTitle:()=>i,default:()=>u,frontMatter:()=>a,metadata:()=>l,toc:()=>s});var n=t(87462),o=(t(67294),t(3905));const a={},i="Ignoring types and fields",l={unversionedId:"reference/sql-graphql/ignore",id:"version-0.42.0/reference/sql-graphql/ignore",title:"Ignoring types and fields",description:"@platformatic/sql-graphql allows to selectively ignore types and fields.",source:"@site/versioned_docs/version-0.42.0/reference/sql-graphql/ignore.md",sourceDirName:"reference/sql-graphql",slug:"/reference/sql-graphql/ignore",permalink:"/docs/0.42.0/reference/sql-graphql/ignore",draft:!1,editUrl:"https://github.com/platformatic/oss/edit/main/versioned_docs/version-0.42.0/reference/sql-graphql/ignore.md",tags:[],version:"0.42.0",frontMatter:{},sidebar:"docs",previous:{title:"Many To Many Relationship",permalink:"/docs/0.42.0/reference/sql-graphql/many-to-many"},next:{title:"Introduction to @platformatic/sql-mapper",permalink:"/docs/0.42.0/reference/sql-mapper/introduction"}},p={},s=[],c={toc:s},f="wrapper";function u(e){let{components:r,...t}=e;return(0,o.kt)(f,(0,n.Z)({},c,t,{components:r,mdxType:"MDXLayout"}),(0,o.kt)("h1",{id:"ignoring-types-and-fields"},"Ignoring types and fields"),(0,o.kt)("p",null,(0,o.kt)("inlineCode",{parentName:"p"},"@platformatic/sql-graphql")," allows to selectively ignore types and fields."),(0,o.kt)("p",null,"To ignore types:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-javascript"},"app.register(require('@platformatic/sql-graphql'), {\n ignore: {\n categories: true\n }\n})\n")),(0,o.kt)("p",null,"To ignore individual fields:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-javascript"},"app.register(require('@platformatic/sql-graphql'), {\n ignore: {\n categories: {\n name: true\n }\n 
}\n})\n")))}u.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/131e3b9e.4fc07f85.js b/assets/js/131e3b9e.4fc07f85.js new file mode 100644 index 00000000000..1dd45d41725 --- /dev/null +++ b/assets/js/131e3b9e.4fc07f85.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkplatformatic_oss_website=self.webpackChunkplatformatic_oss_website||[]).push([[236],{3905:(e,t,n)=>{n.d(t,{Zo:()=>s,kt:()=>m});var r=n(67294);function o(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function a(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);t&&(r=r.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,r)}return n}function i(e){for(var t=1;t=0||(o[n]=e[n]);return o}(e,t);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);for(r=0;r=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(o[n]=e[n])}return o}var c=r.createContext({}),l=function(e){var t=r.useContext(c),n=t;return e&&(n="function"==typeof e?e(t):i(i({},t),e)),n},s=function(e){var t=l(e.components);return r.createElement(c.Provider,{value:t},e.children)},f="mdxType",u={inlineCode:"code",wrapper:function(e){var t=e.children;return r.createElement(r.Fragment,{},t)}},d=r.forwardRef((function(e,t){var n=e.components,o=e.mdxType,a=e.originalType,c=e.parentName,s=p(e,["components","mdxType","originalType","parentName"]),f=l(n),d=o,m=f["".concat(c,".").concat(d)]||f[d]||u[d]||a;return n?r.createElement(m,i(i({ref:t},s),{},{components:n})):r.createElement(m,i({ref:t},s))}));function m(e,t){var n=arguments,o=t&&t.mdxType;if("string"==typeof e||o){var a=n.length,i=new Array(a);i[0]=d;var p={};for(var c in t)hasOwnProperty.call(t,c)&&(p[c]=t[c]);p.originalType=e,p[f]="string"==typeof e?e:o,i[1]=p;for(var l=2;l{n.r(t),n.d(t,{assets:()=>c,contentTitle:()=>i,default:()=>u,frontMatter:()=>a,metadata:()=>p,toc:()=>l});var 
r=n(87462),o=(n(67294),n(3905));const a={},i="Introduction to the REST API",p={unversionedId:"reference/sql-openapi/introduction",id:"version-0.42.1/reference/sql-openapi/introduction",title:"Introduction to the REST API",description:"The Platformatic DB OpenAPI plugin automatically starts a REST API server (powered by Fastify) that provides CRUD (Create, Read, Update, Delete) functionality for each entity.",source:"@site/versioned_docs/version-0.42.1/reference/sql-openapi/introduction.md",sourceDirName:"reference/sql-openapi",slug:"/reference/sql-openapi/introduction",permalink:"/docs/reference/sql-openapi/introduction",draft:!1,editUrl:"https://github.com/platformatic/oss/edit/main/versioned_docs/version-0.42.1/reference/sql-openapi/introduction.md",tags:[],version:"0.42.1",frontMatter:{},sidebar:"docs",previous:{title:"Frontend client",permalink:"/docs/reference/client/frontend"},next:{title:"API",permalink:"/docs/reference/sql-openapi/api"}},c={},l=[{value:"Configuration",id:"configuration",level:2}],s={toc:l},f="wrapper";function u(e){let{components:t,...n}=e;return(0,o.kt)(f,(0,r.Z)({},s,n,{components:t,mdxType:"MDXLayout"}),(0,o.kt)("h1",{id:"introduction-to-the-rest-api"},"Introduction to the REST API"),(0,o.kt)("p",null,"The Platformatic DB OpenAPI plugin automatically starts a REST API server (powered by ",(0,o.kt)("a",{parentName:"p",href:"https://fastify.io"},"Fastify"),") that provides CRUD (",(0,o.kt)("strong",{parentName:"p"},"C"),"reate, ",(0,o.kt)("strong",{parentName:"p"},"R"),"ead, ",(0,o.kt)("strong",{parentName:"p"},"U"),"pdate, ",(0,o.kt)("strong",{parentName:"p"},"D"),"elete) functionality for each entity."),(0,o.kt)("h2",{id:"configuration"},"Configuration"),(0,o.kt)("p",null,"In the config file, under the ",(0,o.kt)("inlineCode",{parentName:"p"},'"db"')," section, the OpenAPI server is enabled by default. 
Although you can disable it setting the property ",(0,o.kt)("inlineCode",{parentName:"p"},"openapi")," to ",(0,o.kt)("inlineCode",{parentName:"p"},"false"),"."),(0,o.kt)("p",null,(0,o.kt)("em",{parentName:"p"},"Example")),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-json"},'{\n ...\n "db": {\n "openapi": false\n }\n}\n')),(0,o.kt)("p",null,"As Platformatic DB uses ",(0,o.kt)("a",{parentName:"p",href:"https://github.com/fastify/fastify-swagger"},(0,o.kt)("inlineCode",{parentName:"a"},"fastify-swagger"))," under the hood, the ",(0,o.kt)("inlineCode",{parentName:"p"},'"openapi"')," property can be an object that follows the ",(0,o.kt)("a",{parentName:"p",href:"https://swagger.io/specification/#oasObject"},"OpenAPI Specification Object")," format."),(0,o.kt)("p",null,"This allows you to extend the output of the Swagger UI documentation."))}u.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/1328284e.274f813f.js b/assets/js/1328284e.274f813f.js new file mode 100644 index 00000000000..4f978ad4e85 --- /dev/null +++ b/assets/js/1328284e.274f813f.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkplatformatic_oss_website=self.webpackChunkplatformatic_oss_website||[]).push([[81531],{3905:(e,t,n)=>{n.d(t,{Zo:()=>u,kt:()=>h});var a=n(67294);function r(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function i(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);t&&(a=a.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,a)}return n}function s(e){for(var t=1;t=0||(r[n]=e[n]);return r}(e,t);if(Object.getOwnPropertySymbols){var i=Object.getOwnPropertySymbols(e);for(a=0;a=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(r[n]=e[n])}return r}var l=a.createContext({}),p=function(e){var t=a.useContext(l),n=t;return e&&(n="function"==typeof e?e(t):s(s({},t),e)),n},u=function(e){var 
t=p(e.components);return a.createElement(l.Provider,{value:t},e.children)},c="mdxType",d={inlineCode:"code",wrapper:function(e){var t=e.children;return a.createElement(a.Fragment,{},t)}},m=a.forwardRef((function(e,t){var n=e.components,r=e.mdxType,i=e.originalType,l=e.parentName,u=o(e,["components","mdxType","originalType","parentName"]),c=p(n),m=r,h=c["".concat(l,".").concat(m)]||c[m]||d[m]||i;return n?a.createElement(h,s(s({ref:t},u),{},{components:n})):a.createElement(h,s({ref:t},u))}));function h(e,t){var n=arguments,r=t&&t.mdxType;if("string"==typeof e||r){var i=n.length,s=new Array(i);s[0]=m;var o={};for(var l in t)hasOwnProperty.call(t,l)&&(o[l]=t[l]);o.originalType=e,o[c]="string"==typeof e?e:r,s[1]=o;for(var p=2;p{n.r(t),n.d(t,{assets:()=>l,contentTitle:()=>s,default:()=>d,frontMatter:()=>i,metadata:()=>o,toc:()=>p});var a=n(87462),r=(n(67294),n(3905));const i={},s="Rules",o={unversionedId:"reference/db/authorization/rules",id:"version-0.41.1/reference/db/authorization/rules",title:"Rules",description:"Introduction",source:"@site/versioned_docs/version-0.41.1/reference/db/authorization/rules.md",sourceDirName:"reference/db/authorization",slug:"/reference/db/authorization/rules",permalink:"/docs/0.41.1/reference/db/authorization/rules",draft:!1,editUrl:"https://github.com/platformatic/oss/edit/main/versioned_docs/version-0.41.1/reference/db/authorization/rules.md",tags:[],version:"0.41.1",frontMatter:{},sidebar:"docs",previous:{title:"User Roles & Metadata",permalink:"/docs/0.41.1/reference/db/authorization/user-roles-metadata"},next:{title:"Plugin",permalink:"/docs/0.41.1/reference/db/plugin"}},l={},p=[{value:"Introduction",id:"introduction",level:2},{value:"Operation checks",id:"operation-checks",level:2},{value:"GraphQL events and subscriptions",id:"graphql-events-and-subscriptions",level:3},{value:"Restrict access to entity fields",id:"restrict-access-to-entity-fields",level:2},{value:"Set entity fields from user 
metadata",id:"set-entity-fields-from-user-metadata",level:2},{value:"Programmatic rules",id:"programmatic-rules",level:2},{value:"Access validation on entity mapper for plugins",id:"access-validation-on-entity-mapper-for-plugins",level:2},{value:"Skip authorization rules",id:"skip-authorization-rules",level:2},{value:"Avoid repetition of the same rule multiple times",id:"avoid-repetition-of-the-same-rule-multiple-times",level:2}],u={toc:p},c="wrapper";function d(e){let{components:t,...n}=e;return(0,r.kt)(c,(0,a.Z)({},u,n,{components:t,mdxType:"MDXLayout"}),(0,r.kt)("h1",{id:"rules"},"Rules"),(0,r.kt)("h2",{id:"introduction"},"Introduction"),(0,r.kt)("p",null,"Authorization rules can be defined to control what operations users are\nable to execute via the REST or GraphQL APIs that are exposed by a Platformatic\nDB app."),(0,r.kt)("p",null,"Every rule must specify:"),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("inlineCode",{parentName:"li"},"role")," (required) \u2014 A role name. 
It's a string and must match with the role(s) set by an external authentication service."),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("inlineCode",{parentName:"li"},"entity")," (optional) \u2014 The Platformatic DB entity to apply this rule to."),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("inlineCode",{parentName:"li"},"entities")," (optional) \u2014 The Platformatic DB entities to apply this rule to."),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("inlineCode",{parentName:"li"},"defaults")," (optional) \u2014 Configure entity fields that will be\n",(0,r.kt)("a",{parentName:"li",href:"#set-entity-fields-from-user-metadata"},"automatically set from user data"),"."),(0,r.kt)("li",{parentName:"ul"},"One entry for each supported CRUD operation: ",(0,r.kt)("inlineCode",{parentName:"li"},"find"),", ",(0,r.kt)("inlineCode",{parentName:"li"},"save"),", ",(0,r.kt)("inlineCode",{parentName:"li"},"delete"))),(0,r.kt)("p",null,"One of ",(0,r.kt)("inlineCode",{parentName:"p"},"entity")," and ",(0,r.kt)("inlineCode",{parentName:"p"},"entities")," must be specified."),(0,r.kt)("h2",{id:"operation-checks"},"Operation checks"),(0,r.kt)("p",null,"Every entity operation \u2014 such as ",(0,r.kt)("inlineCode",{parentName:"p"},"find"),", ",(0,r.kt)("inlineCode",{parentName:"p"},"insert"),", ",(0,r.kt)("inlineCode",{parentName:"p"},"save")," or ",(0,r.kt)("inlineCode",{parentName:"p"},"delete")," \u2014 can have\nauthorization ",(0,r.kt)("inlineCode",{parentName:"p"},"checks")," specified for them. 
This value can be ",(0,r.kt)("inlineCode",{parentName:"p"},"false")," (operation disabled)\nor ",(0,r.kt)("inlineCode",{parentName:"p"},"true")," (operation enabled with no checks)."),(0,r.kt)("p",null,"To specify more fine-grained authorization controls, add a ",(0,r.kt)("inlineCode",{parentName:"p"},"checks")," field, e.g.:"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-json"},'{\n "role": "user",\n "entity": "page",\n "find": {\n "checks": {\n "userId": "X-PLATFORMATIC-USER-ID"\n }\n },\n ...\n}\n\n')),(0,r.kt)("p",null,"In this example, when a user with a ",(0,r.kt)("inlineCode",{parentName:"p"},"user")," role executes a ",(0,r.kt)("inlineCode",{parentName:"p"},"findPage"),", they can\naccess all the data that has ",(0,r.kt)("inlineCode",{parentName:"p"},"userId")," equal to the value in user metadata with\nkey ",(0,r.kt)("inlineCode",{parentName:"p"},"X-PLATFORMATIC-USER-ID"),"."),(0,r.kt)("p",null,"Note that ",(0,r.kt)("inlineCode",{parentName:"p"},'"userId": "X-PLATFORMATIC-USER-ID"')," is syntactic sugar for:"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-json"},' "find": {\n "checks": {\n "userId": {\n "eq": "X-PLATFORMATIC-USER-ID"\n }\n }\n }\n')),(0,r.kt)("p",null,"It's possible to specify more complex rules using all the ",(0,r.kt)("a",{parentName:"p",href:"/docs/0.41.1/reference/sql-mapper/entities/api#where-clause"},"supported where clause operators"),"."),(0,r.kt)("p",null,"Note that ",(0,r.kt)("inlineCode",{parentName:"p"},"userId")," MUST exist as a field in the database table to use this feature."),(0,r.kt)("h3",{id:"graphql-events-and-subscriptions"},"GraphQL events and subscriptions"),(0,r.kt)("p",null,"Platformatic DB supports GraphQL subscriptions and therefore db-authorization must protect them.\nThe check is performed based on the ",(0,r.kt)("inlineCode",{parentName:"p"},"find")," permissions, the only permissions that are supported 
are:"),(0,r.kt)("ol",null,(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("inlineCode",{parentName:"li"},"find: false"),", the subscription for that role is disabled"),(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("inlineCode",{parentName:"li"},"find: { checks: { [prop]: 'X-PLATFORMATIC-PROP' } }")," validates that the given prop is equal"),(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("inlineCode",{parentName:"li"},"find: { checks: { [prop]: { eq: 'X-PLATFORMATIC-PROP' } } }")," validates that the given prop is equal")),(0,r.kt)("p",null,"Conflicting rules across roles for different equality checks will not be supported."),(0,r.kt)("h2",{id:"restrict-access-to-entity-fields"},"Restrict access to entity fields"),(0,r.kt)("p",null,"If a ",(0,r.kt)("inlineCode",{parentName:"p"},"fields")," array is present on an operation, Platformatic DB restricts the columns on which the user can execute to that list.\nFor ",(0,r.kt)("inlineCode",{parentName:"p"},"save")," operations, the configuration must specify all the not-nullable fields (otherwise, it would fail at runtime).\nPlatformatic does these checks at startup."),(0,r.kt)("p",null,"Example:"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-json"},' "rule": {\n "entity": "page",\n "role": "user",\n "find": {\n "checks": {\n "userId": "X-PLATFORMATIC-USER-ID"\n },\n "fields": ["id", "title"]\n }\n ...\n }\n')),(0,r.kt)("p",null,"In this case, only ",(0,r.kt)("inlineCode",{parentName:"p"},"id")," and ",(0,r.kt)("inlineCode",{parentName:"p"},"title")," are returned for a user with a ",(0,r.kt)("inlineCode",{parentName:"p"},"user")," role on the ",(0,r.kt)("inlineCode",{parentName:"p"},"page")," entity."),(0,r.kt)("h2",{id:"set-entity-fields-from-user-metadata"},"Set entity fields from user metadata"),(0,r.kt)("p",null,"Defaults are used in database insert and are default fields added automatically populated from user metadata, 
e.g.:"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-json"},' "defaults": {\n "userId": "X-PLATFORMATIC-USER-ID"\n },\n')),(0,r.kt)("p",null,"When an entity is created, the ",(0,r.kt)("inlineCode",{parentName:"p"},"userId")," column is used and populated using the value from user metadata."),(0,r.kt)("h2",{id:"programmatic-rules"},"Programmatic rules"),(0,r.kt)("p",null,"If it's necessary to have more control over the authorizations, it's possible to specify the rules programmatically, e.g.:"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-js"},"\n app.register(auth, {\n jwt: {\n secret: 'supersecret'\n },\n rules: [{\n role: 'user',\n entity: 'page',\n async find ({ user, ctx, where }) {\n return {\n ...where,\n userId: {\n eq: user['X-PLATFORMATIC-USER-ID']\n }\n }\n },\n async delete ({ user, ctx, where }) {\n return {\n ...where,\n userId: {\n eq: user['X-PLATFORMATIC-USER-ID']\n }\n }\n },\n defaults: {\n userId: async function ({ user, ctx, input }) {\n match(user, {\n 'X-PLATFORMATIC-USER-ID': generated.shift(),\n 'X-PLATFORMATIC-ROLE': 'user'\n })\n return user['X-PLATFORMATIC-USER-ID']\n }\n\n },\n async save ({ user, ctx, where }) {\n return {\n ...where,\n userId: {\n eq: user['X-PLATFORMATIC-USER-ID']\n }\n }\n }\n }]\n })\n\n")),(0,r.kt)("p",null,"In this example, the ",(0,r.kt)("inlineCode",{parentName:"p"},"user")," role can delete all the posts edited before yesterday:"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-js"}," app.register(auth, {\n jwt: {\n secret: 'supersecret'\n },\n roleKey: 'X-PLATFORMATIC-ROLE',\n anonymousRole: 'anonymous',\n rules: [{\n role: 'user',\n entity: 'page',\n find: true,\n save: true,\n async delete ({ user, ctx, where }) {\n return {\n ...where,\n editedAt: {\n lt: yesterday\n }\n }\n },\n defaults: {\n userId: 'X-PLATFORMATIC-USER-ID'\n }\n }]\n })\n")),(0,r.kt)("h2",{id:"access-validation-on-entity-mapper-for-plugins"},"Access 
validation on ",(0,r.kt)("inlineCode",{parentName:"h2"},"entity mapper")," for plugins"),(0,r.kt)("p",null,"To assert that a specific user with it's ",(0,r.kt)("inlineCode",{parentName:"p"},"role(s)")," has the correct access rights to use entities on a ",(0,r.kt)("inlineCode",{parentName:"p"},"platformatic plugin")," the context should be passed to the ",(0,r.kt)("inlineCode",{parentName:"p"},"entity mapper")," in order to verify it's permissions like this:"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-js"},"//plugin.js\n\napp.post('/', async (req, reply) => {\n const ctx = req.createPlatformaticCtx()\n \n await app.platformatic.entities.movie.find({\n where: { /*...*/ },\n ctx\n })\n})\n\n")),(0,r.kt)("h2",{id:"skip-authorization-rules"},"Skip authorization rules"),(0,r.kt)("p",null,"In custom plugins, it's possible to skip the authorization rules on entities programmatically by setting the ",(0,r.kt)("inlineCode",{parentName:"p"},"skipAuth")," flag to ",(0,r.kt)("inlineCode",{parentName:"p"},"true")," or not passing a ",(0,r.kt)("inlineCode",{parentName:"p"},"ctx"),", e.g.:"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-js"},"// this works even if the user's role doesn't have the `find` permission.\nconst result = await app.platformatic.entities.page.find({skipAuth: true, ...})\n")),(0,r.kt)("p",null,"This has the same effect:"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-js"},"// this works even if the user's role doesn't have the `find` permission\nconst result = await app.platformatic.entities.page.find() // no `ctx`\n")),(0,r.kt)("p",null,"This is useful for custom plugins for which the authentication is not necessary, so there is no user role set when invoked."),(0,r.kt)("admonition",{type:"info"},(0,r.kt)("p",{parentName:"admonition"},"Skip authorization rules is not possible on the automatically generated REST and GraphQL 
APIs.")),(0,r.kt)("h2",{id:"avoid-repetition-of-the-same-rule-multiple-times"},"Avoid repetition of the same rule multiple times"),(0,r.kt)("p",null,"Very often we end up writing the same rules over and over again.\nInstead, it's possible to condense the rule for multiple entities on a single entry:"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-js"}," app.register(auth, {\n jwt: {\n secret: 'supersecret'\n },\n roleKey: 'X-PLATFORMATIC-ROLE',\n anonymousRole: 'anonymous',\n rules: [{\n role: 'anonymous',\n entities: ['category', 'page'],\n find: true,\n delete: false,\n save: false\n }]\n})\n")))}d.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/13826def.fa197dce.js b/assets/js/13826def.fa197dce.js new file mode 100644 index 00000000000..90670ad72cf --- /dev/null +++ b/assets/js/13826def.fa197dce.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkplatformatic_oss_website=self.webpackChunkplatformatic_oss_website||[]).push([[39721],{3905:(e,n,r)=>{r.d(n,{Zo:()=>c,kt:()=>m});var t=r(67294);function i(e,n,r){return n in e?Object.defineProperty(e,n,{value:r,enumerable:!0,configurable:!0,writable:!0}):e[n]=r,e}function o(e,n){var r=Object.keys(e);if(Object.getOwnPropertySymbols){var t=Object.getOwnPropertySymbols(e);n&&(t=t.filter((function(n){return Object.getOwnPropertyDescriptor(e,n).enumerable}))),r.push.apply(r,t)}return r}function a(e){for(var n=1;n=0||(i[r]=e[r]);return i}(e,n);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);for(t=0;t=0||Object.prototype.propertyIsEnumerable.call(e,r)&&(i[r]=e[r])}return i}var p=t.createContext({}),s=function(e){var n=t.useContext(p),r=n;return e&&(r="function"==typeof e?e(n):a(a({},n),e)),r},c=function(e){var n=s(e.components);return t.createElement(p.Provider,{value:n},e.children)},f="mdxType",u={inlineCode:"code",wrapper:function(e){var n=e.children;return t.createElement(t.Fragment,{},n)}},d=t.forwardRef((function(e,n){var 
r=e.components,i=e.mdxType,o=e.originalType,p=e.parentName,c=l(e,["components","mdxType","originalType","parentName"]),f=s(r),d=i,m=f["".concat(p,".").concat(d)]||f[d]||u[d]||o;return r?t.createElement(m,a(a({ref:n},c),{},{components:r})):t.createElement(m,a({ref:n},c))}));function m(e,n){var r=arguments,i=n&&n.mdxType;if("string"==typeof e||i){var o=r.length,a=new Array(o);a[0]=d;var l={};for(var p in n)hasOwnProperty.call(n,p)&&(l[p]=n[p]);l.originalType=e,l[f]="string"==typeof e?e:i,a[1]=l;for(var s=2;s{r.r(n),r.d(n,{assets:()=>p,contentTitle:()=>a,default:()=>u,frontMatter:()=>o,metadata:()=>l,toc:()=>s});var t=r(87462),i=(r(67294),r(3905));const o={},a="Ignoring entities and fields",l={unversionedId:"reference/sql-openapi/ignore",id:"version-0.41.3/reference/sql-openapi/ignore",title:"Ignoring entities and fields",description:"@platformatic/sql-openapi allows to selectively ignore entities and fields.",source:"@site/versioned_docs/version-0.41.3/reference/sql-openapi/ignore.md",sourceDirName:"reference/sql-openapi",slug:"/reference/sql-openapi/ignore",permalink:"/docs/0.41.3/reference/sql-openapi/ignore",draft:!1,editUrl:"https://github.com/platformatic/oss/edit/main/versioned_docs/version-0.41.3/reference/sql-openapi/ignore.md",tags:[],version:"0.41.3",frontMatter:{},sidebar:"docs",previous:{title:"API",permalink:"/docs/0.41.3/reference/sql-openapi/api"},next:{title:"Introduction to the GraphQL API",permalink:"/docs/0.41.3/reference/sql-graphql/introduction"}},p={},s=[],c={toc:s},f="wrapper";function u(e){let{components:n,...r}=e;return(0,i.kt)(f,(0,t.Z)({},c,r,{components:n,mdxType:"MDXLayout"}),(0,i.kt)("h1",{id:"ignoring-entities-and-fields"},"Ignoring entities and fields"),(0,i.kt)("p",null,(0,i.kt)("inlineCode",{parentName:"p"},"@platformatic/sql-openapi")," allows to selectively ignore entities and fields."),(0,i.kt)("p",null,"To ignore 
entites:"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-javascript"},"app.register(require('@platformatic/sql-openapi'), {\n ignore: {\n categories: true\n }\n})\n")),(0,i.kt)("p",null,"To ignore individual fields:"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-javascript"},"app.register(require('@platformatic/sql-openapi'), {\n ignore: {\n categories: {\n name: true\n }\n }\n})\n")))}u.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/138e80c2.f5739480.js b/assets/js/138e80c2.f5739480.js new file mode 100644 index 00000000000..10b3990bdc4 --- /dev/null +++ b/assets/js/138e80c2.f5739480.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkplatformatic_oss_website=self.webpackChunkplatformatic_oss_website||[]).push([[82004],{3905:(e,t,n)=>{n.d(t,{Zo:()=>u,kt:()=>f});var r=n(67294);function a(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function o(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);t&&(r=r.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,r)}return n}function i(e){for(var t=1;t=0||(a[n]=e[n]);return a}(e,t);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);for(r=0;r=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(a[n]=e[n])}return a}var s=r.createContext({}),p=function(e){var t=r.useContext(s),n=t;return e&&(n="function"==typeof e?e(t):i(i({},t),e)),n},u=function(e){var t=p(e.components);return r.createElement(s.Provider,{value:t},e.children)},m="mdxType",c={inlineCode:"code",wrapper:function(e){var t=e.children;return r.createElement(r.Fragment,{},t)}},d=r.forwardRef((function(e,t){var n=e.components,a=e.mdxType,o=e.originalType,s=e.parentName,u=l(e,["components","mdxType","originalType","parentName"]),m=p(n),d=a,f=m["".concat(s,".").concat(d)]||m[d]||c[d]||o;return 
n?r.createElement(f,i(i({ref:t},u),{},{components:n})):r.createElement(f,i({ref:t},u))}));function f(e,t){var n=arguments,a=t&&t.mdxType;if("string"==typeof e||a){var o=n.length,i=new Array(o);i[0]=d;var l={};for(var s in t)hasOwnProperty.call(t,s)&&(l[s]=t[s]);l.originalType=e,l[m]="string"==typeof e?e:a,i[1]=l;for(var p=2;p{n.r(t),n.d(t,{assets:()=>s,contentTitle:()=>i,default:()=>c,frontMatter:()=>o,metadata:()=>l,toc:()=>p});var r=n(87462),a=(n(67294),n(3905));const o={},i="Migrations",l={unversionedId:"reference/db/migrations",id:"version-0.42.1/reference/db/migrations",title:"Migrations",description:"It uses Postgrator under the hood to run migrations. Please refer to the Postgrator documentation for guidance on writing migration files.",source:"@site/versioned_docs/version-0.42.1/reference/db/migrations.md",sourceDirName:"reference/db",slug:"/reference/db/migrations",permalink:"/docs/reference/db/migrations",draft:!1,editUrl:"https://github.com/platformatic/oss/edit/main/versioned_docs/version-0.42.1/reference/db/migrations.md",tags:[],version:"0.42.1",frontMatter:{},sidebar:"docs",previous:{title:"Configuration",permalink:"/docs/reference/db/configuration"},next:{title:"Authorization",permalink:"/docs/reference/db/authorization/introduction"}},s={},p=[{value:"How to run migrations",id:"how-to-run-migrations",level:2},{value:"Automatically on server start",id:"automatically-on-server-start",level:3},{value:"Manually with the CLI",id:"manually-with-the-cli",level:3}],u={toc:p},m="wrapper";function c(e){let{components:t,...n}=e;return(0,a.kt)(m,(0,r.Z)({},u,n,{components:t,mdxType:"MDXLayout"}),(0,a.kt)("h1",{id:"migrations"},"Migrations"),(0,a.kt)("p",null,"It uses ",(0,a.kt)("a",{parentName:"p",href:"https://www.npmjs.com/package/postgrator"},"Postgrator")," under the hood to run migrations. 
Please refer to the ",(0,a.kt)("a",{parentName:"p",href:"https://github.com/rickbergfalk/postgrator"},"Postgrator documentation")," for guidance on writing migration files."),(0,a.kt)("p",null,"In brief, you should create a file structure like this"),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre"},"migrations/\n |- 001.do.sql\n |- 001.undo.sql\n |- 002.do.sql\n |- 002.undo.sql\n |- 003.do.sql\n |- 003.undo.sql\n |- 004.do.sql\n |- 004.undo.sql\n |- ... and so on\n")),(0,a.kt)("p",null,"Postgrator uses a table in your schema, to store which migrations have been already processed, so that only new ones will be applied at every server start."),(0,a.kt)("p",null,"You can always rollback some migrations specifing what version you would like to rollback to."),(0,a.kt)("p",null,(0,a.kt)("em",{parentName:"p"},"Example")),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"$ platformatic db migrations apply --to 002\n")),(0,a.kt)("p",null,"Will execute ",(0,a.kt)("inlineCode",{parentName:"p"},"004.undo.sql"),", ",(0,a.kt)("inlineCode",{parentName:"p"},"003.undo.sql")," in this order. If you keep those files in migrations directory, when the server restarts it will execute ",(0,a.kt)("inlineCode",{parentName:"p"},"003.do.sql")," and ",(0,a.kt)("inlineCode",{parentName:"p"},"004.do.sql")," in this order if the ",(0,a.kt)("inlineCode",{parentName:"p"},"autoApply")," value is true, or you can run the ",(0,a.kt)("inlineCode",{parentName:"p"},"db migrations apply")," command."),(0,a.kt)("p",null,"It's also possible to rollback a single migration with ",(0,a.kt)("inlineCode",{parentName:"p"},"-r"),": "),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"$ platformatic db migrations apply -r \n")),(0,a.kt)("h2",{id:"how-to-run-migrations"},"How to run migrations"),(0,a.kt)("p",null,"There are two ways to run migrations in Platformatic DB. 
They can be processed automatically when the server starts if the ",(0,a.kt)("inlineCode",{parentName:"p"},"autoApply")," value is true, or you can just run the ",(0,a.kt)("inlineCode",{parentName:"p"},"db migrations apply")," command."),(0,a.kt)("p",null,"In both cases you have to edit your config file to tell Platformatic DB where are your migration files."),(0,a.kt)("h3",{id:"automatically-on-server-start"},"Automatically on server start"),(0,a.kt)("p",null,"To run migrations when Platformatic DB starts, you need to use the config file root property ",(0,a.kt)("inlineCode",{parentName:"p"},"migrations"),"."),(0,a.kt)("p",null,"There are two options in the ",(0,a.kt)("inlineCode",{parentName:"p"},'"migrations"')," property"),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("inlineCode",{parentName:"li"},"dir")," (",(0,a.kt)("em",{parentName:"li"},"required"),") the directory where the migration files are located. It will be relative to the config file path."),(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("inlineCode",{parentName:"li"},"autoApply")," a boolean value that tells Platformatic DB to auto-apply migrations or not (default: ",(0,a.kt)("inlineCode",{parentName:"li"},"false"),")")),(0,a.kt)("p",null,(0,a.kt)("em",{parentName:"p"},"Example")),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-json"},'{\n ...\n "migrations": {\n "dir": "./path/to/migrations/folder",\n "autoApply": false\n }\n}\n')),(0,a.kt)("h3",{id:"manually-with-the-cli"},"Manually with the CLI"),(0,a.kt)("p",null,"See documentation about ",(0,a.kt)("inlineCode",{parentName:"p"},"db migrations apply")," ",(0,a.kt)("a",{parentName:"p",href:"../cli#migrate"},"command")),(0,a.kt)("p",null,"In short:"),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},"be sure to define a correct ",(0,a.kt)("inlineCode",{parentName:"li"},"migrations.dir")," folder under the config on 
",(0,a.kt)("inlineCode",{parentName:"li"},"platformatic.db.json")),(0,a.kt)("li",{parentName:"ul"},"get the ",(0,a.kt)("inlineCode",{parentName:"li"},"MIGRATION_NUMBER")," (f.e. if the file is named ",(0,a.kt)("inlineCode",{parentName:"li"},"002.do.sql")," will be ",(0,a.kt)("inlineCode",{parentName:"li"},"002"),")"),(0,a.kt)("li",{parentName:"ul"},"run ",(0,a.kt)("inlineCode",{parentName:"li"},"npx platformatic db migrations apply --to MIGRATION_NUMBER"))))}c.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/14077682.7fab084a.js b/assets/js/14077682.7fab084a.js new file mode 100644 index 00000000000..fd23a46cbac --- /dev/null +++ b/assets/js/14077682.7fab084a.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkplatformatic_oss_website=self.webpackChunkplatformatic_oss_website||[]).push([[44451],{3905:(e,t,r)=>{r.d(t,{Zo:()=>u,kt:()=>f});var n=r(67294);function a(e,t,r){return t in e?Object.defineProperty(e,t,{value:r,enumerable:!0,configurable:!0,writable:!0}):e[t]=r,e}function o(e,t){var r=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);t&&(n=n.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),r.push.apply(r,n)}return r}function i(e){for(var t=1;t=0||(a[r]=e[r]);return a}(e,t);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);for(n=0;n=0||Object.prototype.propertyIsEnumerable.call(e,r)&&(a[r]=e[r])}return a}var s=n.createContext({}),c=function(e){var t=n.useContext(s),r=t;return e&&(r="function"==typeof e?e(t):i(i({},t),e)),r},u=function(e){var t=c(e.components);return n.createElement(s.Provider,{value:t},e.children)},d="mdxType",p={inlineCode:"code",wrapper:function(e){var t=e.children;return n.createElement(n.Fragment,{},t)}},m=n.forwardRef((function(e,t){var r=e.components,a=e.mdxType,o=e.originalType,s=e.parentName,u=l(e,["components","mdxType","originalType","parentName"]),d=c(r),m=a,f=d["".concat(s,".").concat(m)]||d[m]||p[m]||o;return 
r?n.createElement(f,i(i({ref:t},u),{},{components:r})):n.createElement(f,i({ref:t},u))}));function f(e,t){var r=arguments,a=t&&t.mdxType;if("string"==typeof e||a){var o=r.length,i=new Array(o);i[0]=m;var l={};for(var s in t)hasOwnProperty.call(t,s)&&(l[s]=t[s]);l.originalType=e,l[d]="string"==typeof e?e:a,i[1]=l;for(var c=2;c{r.r(t),r.d(t,{assets:()=>s,contentTitle:()=>i,default:()=>p,frontMatter:()=>o,metadata:()=>l,toc:()=>c});var n=r(87462),a=(r(67294),r(3905));const o={},i="Debug Platformatic DB",l={unversionedId:"guides/debug-platformatic-db",id:"version-0.41.2/guides/debug-platformatic-db",title:"Debug Platformatic DB",description:"Error: No tables found in the database",source:"@site/versioned_docs/version-0.41.2/guides/debug-platformatic-db.md",sourceDirName:"guides",slug:"/guides/debug-platformatic-db",permalink:"/docs/0.41.2/guides/debug-platformatic-db",draft:!1,editUrl:"https://github.com/platformatic/oss/edit/main/versioned_docs/version-0.41.2/guides/debug-platformatic-db.md",tags:[],version:"0.41.2",frontMatter:{},sidebar:"docs",previous:{title:"Monitoring with Prometheus and Grafana",permalink:"/docs/0.41.2/guides/monitoring"},next:{title:"Integrate Prisma with Platformatic DB",permalink:"/docs/0.41.2/guides/prisma"}},s={},c=[{value:"Error: No tables found in the database",id:"error-no-tables-found-in-the-database",level:2},{value:"Logging SQL queries",id:"logging-sql-queries",level:2}],u={toc:c},d="wrapper";function p(e){let{components:t,...r}=e;return(0,a.kt)(d,(0,n.Z)({},u,r,{components:t,mdxType:"MDXLayout"}),(0,a.kt)("h1",{id:"debug-platformatic-db"},"Debug Platformatic DB"),(0,a.kt)("h2",{id:"error-no-tables-found-in-the-database"},"Error: No tables found in the database"),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},"Verify your database connection string is correct in your Platformatic DB configuration",(0,a.kt)("ul",{parentName:"li"},(0,a.kt)("li",{parentName:"ul"},"Make sure the database name is 
correct"))),(0,a.kt)("li",{parentName:"ul"},"Ensure that you have run the migration command ",(0,a.kt)("inlineCode",{parentName:"li"},"npx platformatic db migrations apply")," before starting the server. See the Platformatic DB ",(0,a.kt)("a",{parentName:"li",href:"https://docs.platformatic.dev/docs/reference/db/migrations"},"Migrations")," documentation for more information on working with migrations.")),(0,a.kt)("h2",{id:"logging-sql-queries"},"Logging SQL queries"),(0,a.kt)("p",null,"You can see all the queries that are being run against your database in your terminal by setting the logger level to trace in your ",(0,a.kt)("inlineCode",{parentName:"p"},"platformatic.db.json")," config file:"),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-json",metastring:'title="platformatic.db.json"',title:'"platformatic.db.json"'},'{\n "server": {\n "logger": {\n "level": "trace"\n }\n }\n}\n')))}p.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/14eb3368.b2303fa9.js b/assets/js/14eb3368.b2303fa9.js new file mode 100644 index 00000000000..e88947a5b1a --- /dev/null +++ b/assets/js/14eb3368.b2303fa9.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkplatformatic_oss_website=self.webpackChunkplatformatic_oss_website||[]).push([[9817],{1310:(e,t,a)=>{a.d(t,{Z:()=>E});var n=a(87462),r=a(67294),i=a(86010),l=a(35281),s=a(53438),c=a(48596),o=a(39960),m=a(95999),d=a(44996);function u(e){return r.createElement("svg",(0,n.Z)({viewBox:"0 0 24 24"},e),r.createElement("path",{d:"M10 19v-5h4v5c0 .55.45 1 1 1h3c.55 0 1-.45 1-1v-7h1.7c.46 0 .68-.57.33-.87L12.67 3.6c-.38-.34-.96-.34-1.34 0l-8.36 7.53c-.34.3-.13.87.33.87H5v7c0 .55.45 1 1 1h3c.55 0 1-.45 1-1z",fill:"currentColor"}))}const h={breadcrumbHomeIcon:"breadcrumbHomeIcon_YNFT"};function b(){const e=(0,d.Z)("/");return r.createElement("li",{className:"breadcrumbs__item"},r.createElement(o.Z,{"aria-label":(0,m.I)({id:"theme.docs.breadcrumbs.home",message:"Home page",description:"The ARIA 
label for the home page in the breadcrumbs"}),className:"breadcrumbs__link",href:e},r.createElement(u,{className:h.breadcrumbHomeIcon})))}const v={breadcrumbsContainer:"breadcrumbsContainer_Z_bl"};function p(e){let{children:t,href:a,isLast:n}=e;const i="breadcrumbs__link";return n?r.createElement("span",{className:i,itemProp:"name"},t):a?r.createElement(o.Z,{className:i,href:a,itemProp:"item"},r.createElement("span",{itemProp:"name"},t)):r.createElement("span",{className:i},t)}function g(e){let{children:t,active:a,index:l,addMicrodata:s}=e;return r.createElement("li",(0,n.Z)({},s&&{itemScope:!0,itemProp:"itemListElement",itemType:"https://schema.org/ListItem"},{className:(0,i.Z)("breadcrumbs__item",{"breadcrumbs__item--active":a})}),t,r.createElement("meta",{itemProp:"position",content:String(l+1)}))}function E(){const e=(0,s.s1)(),t=(0,c.Ns)();return e?r.createElement("nav",{className:(0,i.Z)(l.k.docs.docBreadcrumbs,v.breadcrumbsContainer),"aria-label":(0,m.I)({id:"theme.docs.breadcrumbs.navAriaLabel",message:"Breadcrumbs",description:"The ARIA label for the breadcrumbs"})},r.createElement("ul",{className:"breadcrumbs",itemScope:!0,itemType:"https://schema.org/BreadcrumbList"},t&&r.createElement(b,null),e.map(((t,a)=>{const n=a===e.length-1;return r.createElement(g,{key:a,active:n,index:a,addMicrodata:!!t.href},r.createElement(p,{href:t.href,isLast:n},t.label))})))):null}},34228:(e,t,a)=>{a.r(t),a.d(t,{default:()=>y});var n=a(67294),r=a(1944),i=a(53438),l=a(44996),s=a(86010),c=a(39960),o=a(13919),m=a(95999);const d={cardContainer:"cardContainer_fWXF",cardTitle:"cardTitle_rnsV",cardDescription:"cardDescription_PWke"};function u(e){let{href:t,children:a}=e;return n.createElement(c.Z,{href:t,className:(0,s.Z)("card padding--lg",d.cardContainer)},a)}function h(e){let{href:t,icon:a,title:r,description:i}=e;return n.createElement(u,{href:t},n.createElement("h2",{className:(0,s.Z)("text--truncate",d.cardTitle),title:r},a," 
",r),i&&n.createElement("p",{className:(0,s.Z)("text--truncate",d.cardDescription),title:i},i))}function b(e){let{item:t}=e;const a=(0,i.Wl)(t);return a?n.createElement(h,{href:a,icon:"\ud83d\uddc3\ufe0f",title:t.label,description:t.description??(0,m.I)({message:"{count} items",id:"theme.docs.DocCard.categoryDescription",description:"The default description for a category card in the generated index about how many items this category includes"},{count:t.items.length})}):null}function v(e){let{item:t}=e;const a=(0,o.Z)(t.href)?"\ud83d\udcc4\ufe0f":"\ud83d\udd17",r=(0,i.xz)(t.docId??void 0);return n.createElement(h,{href:t.href,icon:a,title:t.label,description:t.description??r?.description})}function p(e){let{item:t}=e;switch(t.type){case"link":return n.createElement(v,{item:t});case"category":return n.createElement(b,{item:t});default:throw new Error(`unknown item type ${JSON.stringify(t)}`)}}function g(e){let{className:t}=e;const a=(0,i.jA)();return n.createElement(E,{items:a.items,className:t})}function E(e){const{items:t,className:a}=e;if(!t)return n.createElement(g,e);const r=(0,i.MN)(t);return n.createElement("section",{className:(0,s.Z)("row",a)},r.map(((e,t)=>n.createElement("article",{key:t,className:"col col--6 margin-bottom--lg"},n.createElement(p,{item:e})))))}var f=a(80049),N=a(23120),Z=a(44364),k=a(1310),_=a(92503);const L={generatedIndexPage:"generatedIndexPage_vN6x",list:"list_eTzJ",title:"title_kItE"};function T(e){let{categoryGeneratedIndex:t}=e;return n.createElement(r.d,{title:t.title,description:t.description,keywords:t.keywords,image:(0,l.Z)(t.image)})}function x(e){let{categoryGeneratedIndex:t}=e;const a=(0,i.jA)();return 
n.createElement("div",{className:L.generatedIndexPage},n.createElement(N.Z,null),n.createElement(k.Z,null),n.createElement(Z.Z,null),n.createElement("header",null,n.createElement(_.Z,{as:"h1",className:L.title},t.title),t.description&&n.createElement("p",null,t.description)),n.createElement("article",{className:"margin-top--lg"},n.createElement(E,{items:a.items,className:L.list})),n.createElement("footer",{className:"margin-top--lg"},n.createElement(f.Z,{previous:t.navigation.previous,next:t.navigation.next})))}function y(e){return n.createElement(n.Fragment,null,n.createElement(T,e),n.createElement(x,e))}},80049:(e,t,a)=>{a.d(t,{Z:()=>s});var n=a(87462),r=a(67294),i=a(95999),l=a(32244);function s(e){const{previous:t,next:a}=e;return r.createElement("nav",{className:"pagination-nav docusaurus-mt-lg","aria-label":(0,i.I)({id:"theme.docs.paginator.navAriaLabel",message:"Docs pages",description:"The ARIA label for the docs pagination"})},t&&r.createElement(l.Z,(0,n.Z)({},t,{subLabel:r.createElement(i.Z,{id:"theme.docs.paginator.previous",description:"The label used to navigate to the previous doc"},"Previous")})),a&&r.createElement(l.Z,(0,n.Z)({},a,{subLabel:r.createElement(i.Z,{id:"theme.docs.paginator.next",description:"The label used to navigate to the next doc"},"Next"),isNext:!0})))}},44364:(e,t,a)=>{a.d(t,{Z:()=>c});var n=a(67294),r=a(86010),i=a(95999),l=a(35281),s=a(74477);function c(e){let{className:t}=e;const a=(0,s.E)();return a.badge?n.createElement("span",{className:(0,r.Z)(t,l.k.docs.docVersionBadge,"badge badge--secondary")},n.createElement(i.Z,{id:"theme.docs.versionBadge.label",values:{versionLabel:a.label}},"Version: {versionLabel}")):null}},23120:(e,t,a)=>{a.d(t,{Z:()=>p});var n=a(67294),r=a(86010),i=a(52263),l=a(39960),s=a(95999),c=a(94104),o=a(35281),m=a(60373),d=a(74477);const u={unreleased:function(e){let{siteTitle:t,versionMetadata:a}=e;return n.createElement(s.Z,{id:"theme.docs.versions.unreleasedVersionLabel",description:"The label used to 
tell the user that he's browsing an unreleased doc version",values:{siteTitle:t,versionLabel:n.createElement("b",null,a.label)}},"This is unreleased documentation for {siteTitle} {versionLabel} version.")},unmaintained:function(e){let{siteTitle:t,versionMetadata:a}=e;return n.createElement(s.Z,{id:"theme.docs.versions.unmaintainedVersionLabel",description:"The label used to tell the user that he's browsing an unmaintained doc version",values:{siteTitle:t,versionLabel:n.createElement("b",null,a.label)}},"This is documentation for {siteTitle} {versionLabel}, which is no longer actively maintained.")}};function h(e){const t=u[e.versionMetadata.banner];return n.createElement(t,e)}function b(e){let{versionLabel:t,to:a,onClick:r}=e;return n.createElement(s.Z,{id:"theme.docs.versions.latestVersionSuggestionLabel",description:"The label used to tell the user to check the latest version",values:{versionLabel:t,latestVersionLink:n.createElement("b",null,n.createElement(l.Z,{to:a,onClick:r},n.createElement(s.Z,{id:"theme.docs.versions.latestVersionLinkLabel",description:"The label used for the latest version suggestion link label"},"latest version")))}},"For up-to-date documentation, see the {latestVersionLink} ({versionLabel}).")}function v(e){let{className:t,versionMetadata:a}=e;const{siteConfig:{title:l}}=(0,i.Z)(),{pluginId:s}=(0,c.gA)({failfast:!0}),{savePreferredVersionName:d}=(0,m.J)(s),{latestDocSuggestion:u,latestVersionSuggestion:v}=(0,c.Jo)(s),p=u??(g=v).docs.find((e=>e.id===g.mainDocId));var g;return n.createElement("div",{className:(0,r.Z)(t,o.k.docs.docVersionBanner,"alert alert--warning margin-bottom--md"),role:"alert"},n.createElement("div",null,n.createElement(h,{siteTitle:l,versionMetadata:a})),n.createElement("div",{className:"margin-top--md"},n.createElement(b,{versionLabel:v.label,to:p.path,onClick:()=>d(v.name)})))}function p(e){let{className:t}=e;const a=(0,d.E)();return 
a.banner?n.createElement(v,{className:t,versionMetadata:a}):null}},92503:(e,t,a)=>{a.d(t,{Z:()=>m});var n=a(87462),r=a(67294),i=a(86010),l=a(95999),s=a(86668),c=a(39960);const o={anchorWithStickyNavbar:"anchorWithStickyNavbar_LWe7",anchorWithHideOnScrollNavbar:"anchorWithHideOnScrollNavbar_WYt5"};function m(e){let{as:t,id:a,...m}=e;const{navbar:{hideOnScroll:d}}=(0,s.L)();if("h1"===t||!a)return r.createElement(t,(0,n.Z)({},m,{id:void 0}));const u=(0,l.I)({id:"theme.common.headingLinkTitle",message:"Direct link to {heading}",description:"Title for link to heading"},{heading:"string"==typeof m.children?m.children:a});return r.createElement(t,(0,n.Z)({},m,{className:(0,i.Z)("anchor",d?o.anchorWithHideOnScrollNavbar:o.anchorWithStickyNavbar,m.className),id:a}),m.children,r.createElement(c.Z,{className:"hash-link",to:`#${a}`,"aria-label":u,title:u},"\u200b"))}},32244:(e,t,a)=>{a.d(t,{Z:()=>l});var n=a(67294),r=a(86010),i=a(39960);function l(e){const{permalink:t,title:a,subLabel:l,isNext:s}=e;return n.createElement(i.Z,{className:(0,r.Z)("pagination-nav__link",s?"pagination-nav__link--next":"pagination-nav__link--prev"),to:t},l&&n.createElement("div",{className:"pagination-nav__sublabel"},l),n.createElement("div",{className:"pagination-nav__label"},a))}}}]); \ No newline at end of file diff --git a/assets/js/15516d3a.8aa89b6c.js b/assets/js/15516d3a.8aa89b6c.js new file mode 100644 index 00000000000..3cc6937bd2c --- /dev/null +++ b/assets/js/15516d3a.8aa89b6c.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkplatformatic_oss_website=self.webpackChunkplatformatic_oss_website||[]).push([[7687],{3905:(e,t,n)=>{n.d(t,{Zo:()=>p,kt:()=>d});var a=n(67294);function o(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function r(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);t&&(a=a.filter((function(t){return 
Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,a)}return n}function i(e){for(var t=1;t=0||(o[n]=e[n]);return o}(e,t);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);for(a=0;a=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(o[n]=e[n])}return o}var s=a.createContext({}),c=function(e){var t=a.useContext(s),n=t;return e&&(n="function"==typeof e?e(t):i(i({},t),e)),n},p=function(e){var t=c(e.components);return a.createElement(s.Provider,{value:t},e.children)},u="mdxType",m={inlineCode:"code",wrapper:function(e){var t=e.children;return a.createElement(a.Fragment,{},t)}},f=a.forwardRef((function(e,t){var n=e.components,o=e.mdxType,r=e.originalType,s=e.parentName,p=l(e,["components","mdxType","originalType","parentName"]),u=c(n),f=o,d=u["".concat(s,".").concat(f)]||u[f]||m[f]||r;return n?a.createElement(d,i(i({ref:t},p),{},{components:n})):a.createElement(d,i({ref:t},p))}));function d(e,t){var n=arguments,o=t&&t.mdxType;if("string"==typeof e||o){var r=n.length,i=new Array(r);i[0]=f;var l={};for(var s in t)hasOwnProperty.call(t,s)&&(l[s]=t[s]);l.originalType=e,l[u]="string"==typeof e?e:o,i[1]=l;for(var c=2;c{n.r(t),n.d(t,{assets:()=>s,contentTitle:()=>i,default:()=>m,frontMatter:()=>r,metadata:()=>l,toc:()=>c});var a=n(87462),o=(n(67294),n(3905));const r={},i="Packaging a Platformatic Application as a module",l={unversionedId:"guides/packaging-an-application-as-a-module",id:"guides/packaging-an-application-as-a-module",title:"Packaging a Platformatic Application as a module",description:"Platformatic Service and Platformatic 
DB",source:"@site/docs/guides/packaging-an-application-as-a-module.md",sourceDirName:"guides",slug:"/guides/packaging-an-application-as-a-module",permalink:"/docs/next/guides/packaging-an-application-as-a-module",draft:!1,editUrl:"https://github.com/platformatic/platformatic/edit/main/docs/guides/packaging-an-application-as-a-module.md",tags:[],version:"current",frontMatter:{},sidebar:"docs",previous:{title:"Migrating an Express app to Platformatic Service",permalink:"/docs/next/guides/migrating-express-app-to-platformatic-service"},next:{title:"Telemetry with Jaeger",permalink:"/docs/next/guides/telemetry"}},s={},c=[{value:"Creating a custom Service",id:"creating-a-custom-service",level:2},{value:"Consuming a custom application",id:"consuming-a-custom-application",level:2},{value:"Building your own CLI",id:"building-your-own-cli",level:2}],p={toc:c},u="wrapper";function m(e){let{components:t,...n}=e;return(0,o.kt)(u,(0,a.Z)({},p,n,{components:t,mdxType:"MDXLayout"}),(0,o.kt)("h1",{id:"packaging-a-platformatic-application-as-a-module"},"Packaging a Platformatic Application as a module"),(0,o.kt)("p",null,(0,o.kt)("a",{parentName:"p",href:"/docs/next/reference/db/introduction"},"Platformatic Service")," and ",(0,o.kt)("a",{parentName:"p",href:"/docs/next/reference/db/introduction"},"Platformatic DB"),"\noffer a good starting point to create new applications. 
However, most developers or organizations might want to\ncreate reusable services or applications built on top of Platformatic.\nThis is useful to publish the application on the public npm registry (or a private one!), including building your own CLI,\nor to create a specialized template for your organization to allow for centralized bugfixes and updates."),(0,o.kt)("p",null,"This process is the same one we use to maintain Platformatic DB and Platformatic Composer on top of Platformatic Service."),(0,o.kt)("h2",{id:"creating-a-custom-service"},"Creating a custom Service"),(0,o.kt)("p",null,"We are creating the module ",(0,o.kt)("inlineCode",{parentName:"p"},"foo.js")," as follows: "),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-js"},"const { schema, platformaticService } = require('@platformatic/service')\n\n/** @type {import('fastify').FastifyPluginAsync<{}>} */\nasync function foo (app, opts) {\n const text = app.platformatic.config.foo.text\n app.get('/foo', async (request, reply) => {\n return text\n })\n\n await platformaticService(app, opts)\n}\n\nfoo.configType = 'foo'\n\n// break Fastify encapsulation\nfoo[Symbol.for('skip-override')] = true\n\n// The schema for our configuration file\nfoo.schema = {\n $id: 'https://example.com/schemas/foo.json',\n title: 'Foo Service',\n type: 'object',\n properties: {\n server: schema.server,\n plugins: schema.plugins,\n metrics: schema.metrics,\n watch: {\n anyOf: [schema.watch, {\n type: 'boolean'\n }, {\n type: 'string'\n }]\n },\n $schema: {\n type: 'string'\n },\n module: {\n type: 'string'\n },\n foo: {\n type: 'object',\n properties: {\n text: {\n type: 'string'\n }\n },\n required: ['text']\n }\n },\n additionalProperties: false,\n required: ['server']\n}\n\n// The configuration for the ConfigManager\nfoo.configManagerConfig = {\n schema: foo.schema,\n envWhitelist: ['PORT', 'HOSTNAME'],\n allowToWatch: ['.env'],\n schemaOptions: {\n useDefaults: true,\n coerceTypes: true,\n allErrors: 
true,\n strict: false\n }\n}\n\nmodule.exports = foo\n")),(0,o.kt)("p",null,"Note that the ",(0,o.kt)("inlineCode",{parentName:"p"},"$id")," property of the schema identifies the module in our system,\nallowing us to retrieve the schema correctly.\nIt is recommended, but not required, that the JSON schema is actually\npublished in this location. Doing so allows tooling such as the VSCode\nlanguage server to provide autocompletion."),(0,o.kt)("p",null,"In this example, the ",(0,o.kt)("inlineCode",{parentName:"p"},"schema")," adds a custom top-level ",(0,o.kt)("inlineCode",{parentName:"p"},"foo")," property\nthat users can use to configure this specific module."),(0,o.kt)("p",null,"ESM is also supported."),(0,o.kt)("h2",{id:"consuming-a-custom-application"},"Consuming a custom application"),(0,o.kt)("p",null,"Consuming ",(0,o.kt)("inlineCode",{parentName:"p"},"foo.js")," is simple. We can create a ",(0,o.kt)("inlineCode",{parentName:"p"},"platformatic.json")," file as follows:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-json"},'{\n "$schema": "https://example.com/schemas/foo.json",\n "module": "./foo",\n "server": {\n "port": 0,\n "hostname": "127.0.0.1"\n },\n "foo": {\n "text": "Hello World"\n }\n}\n')),(0,o.kt)("p",null,"Note that we ",(0,o.kt)("strong",{parentName:"p"},"must")," specify both the ",(0,o.kt)("inlineCode",{parentName:"p"},"$schema")," property and ",(0,o.kt)("inlineCode",{parentName:"p"},"module"),".\nModule can also be any modules published on npm and installed via your package manager."),(0,o.kt)("h2",{id:"building-your-own-cli"},"Building your own CLI"),(0,o.kt)("p",null,"It is possible to build your own CLI with the following ",(0,o.kt)("inlineCode",{parentName:"p"},"cli.mjs")," file:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre"},"import foo from './foo.js'\nimport { start } from '@platformatic/service'\nimport { printAndExitLoadConfigError } from '@platformatic/config'\n\nawait start(foo, 
process.argv.splice(2)).catch(printConfigValidationErrors)\n")),(0,o.kt)("p",null,"This will also load ",(0,o.kt)("inlineCode",{parentName:"p"},"platformatic.foo.json")," files."))}m.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/15c300a6.ab7618d9.js b/assets/js/15c300a6.ab7618d9.js new file mode 100644 index 00000000000..18f1fd37ee2 --- /dev/null +++ b/assets/js/15c300a6.ab7618d9.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkplatformatic_oss_website=self.webpackChunkplatformatic_oss_website||[]).push([[68003],{3905:(e,t,r)=>{r.d(t,{Zo:()=>u,kt:()=>f});var n=r(67294);function a(e,t,r){return t in e?Object.defineProperty(e,t,{value:r,enumerable:!0,configurable:!0,writable:!0}):e[t]=r,e}function o(e,t){var r=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);t&&(n=n.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),r.push.apply(r,n)}return r}function i(e){for(var t=1;t=0||(a[r]=e[r]);return a}(e,t);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);for(n=0;n=0||Object.prototype.propertyIsEnumerable.call(e,r)&&(a[r]=e[r])}return a}var s=n.createContext({}),c=function(e){var t=n.useContext(s),r=t;return e&&(r="function"==typeof e?e(t):i(i({},t),e)),r},u=function(e){var t=c(e.components);return n.createElement(s.Provider,{value:t},e.children)},d="mdxType",p={inlineCode:"code",wrapper:function(e){var t=e.children;return n.createElement(n.Fragment,{},t)}},m=n.forwardRef((function(e,t){var r=e.components,a=e.mdxType,o=e.originalType,s=e.parentName,u=l(e,["components","mdxType","originalType","parentName"]),d=c(r),m=a,f=d["".concat(s,".").concat(m)]||d[m]||p[m]||o;return r?n.createElement(f,i(i({ref:t},u),{},{components:r})):n.createElement(f,i({ref:t},u))}));function f(e,t){var r=arguments,a=t&&t.mdxType;if("string"==typeof e||a){var o=r.length,i=new Array(o);i[0]=m;var l={};for(var s in 
t)hasOwnProperty.call(t,s)&&(l[s]=t[s]);l.originalType=e,l[d]="string"==typeof e?e:a,i[1]=l;for(var c=2;c{r.r(t),r.d(t,{assets:()=>s,contentTitle:()=>i,default:()=>p,frontMatter:()=>o,metadata:()=>l,toc:()=>c});var n=r(87462),a=(r(67294),r(3905));const o={},i="Debug Platformatic DB",l={unversionedId:"guides/debug-platformatic-db",id:"version-0.42.1/guides/debug-platformatic-db",title:"Debug Platformatic DB",description:"Error: No tables found in the database",source:"@site/versioned_docs/version-0.42.1/guides/debug-platformatic-db.md",sourceDirName:"guides",slug:"/guides/debug-platformatic-db",permalink:"/docs/guides/debug-platformatic-db",draft:!1,editUrl:"https://github.com/platformatic/oss/edit/main/versioned_docs/version-0.42.1/guides/debug-platformatic-db.md",tags:[],version:"0.42.1",frontMatter:{},sidebar:"docs",previous:{title:"Monitoring with Prometheus and Grafana",permalink:"/docs/guides/monitoring"},next:{title:"Integrate Prisma with Platformatic DB",permalink:"/docs/guides/prisma"}},s={},c=[{value:"Error: No tables found in the database",id:"error-no-tables-found-in-the-database",level:2},{value:"Logging SQL queries",id:"logging-sql-queries",level:2}],u={toc:c},d="wrapper";function p(e){let{components:t,...r}=e;return(0,a.kt)(d,(0,n.Z)({},u,r,{components:t,mdxType:"MDXLayout"}),(0,a.kt)("h1",{id:"debug-platformatic-db"},"Debug Platformatic DB"),(0,a.kt)("h2",{id:"error-no-tables-found-in-the-database"},"Error: No tables found in the database"),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},"Verify your database connection string is correct in your Platformatic DB configuration",(0,a.kt)("ul",{parentName:"li"},(0,a.kt)("li",{parentName:"ul"},"Make sure the database name is correct"))),(0,a.kt)("li",{parentName:"ul"},"Ensure that you have run the migration command ",(0,a.kt)("inlineCode",{parentName:"li"},"npx platformatic db migrations apply")," before starting the server. 
See the Platformatic DB ",(0,a.kt)("a",{parentName:"li",href:"https://docs.platformatic.dev/docs/reference/db/migrations"},"Migrations")," documentation for more information on working with migrations.")),(0,a.kt)("h2",{id:"logging-sql-queries"},"Logging SQL queries"),(0,a.kt)("p",null,"You can see all the queries that are being run against your database in your terminal by setting the logger level to trace in your ",(0,a.kt)("inlineCode",{parentName:"p"},"platformatic.db.json")," config file:"),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-json",metastring:'title="platformatic.db.json"',title:'"platformatic.db.json"'},'{\n "server": {\n "logger": {\n "level": "trace"\n }\n }\n}\n')))}p.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/16ed1844.7e79c7d2.js b/assets/js/16ed1844.7e79c7d2.js new file mode 100644 index 00000000000..afa89b9decc --- /dev/null +++ b/assets/js/16ed1844.7e79c7d2.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkplatformatic_oss_website=self.webpackChunkplatformatic_oss_website||[]).push([[37839],{3905:(e,t,n)=>{n.d(t,{Zo:()=>u,kt:()=>m});var r=n(67294);function o(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function a(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);t&&(r=r.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,r)}return n}function i(e){for(var t=1;t=0||(o[n]=e[n]);return o}(e,t);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);for(r=0;r=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(o[n]=e[n])}return o}var p=r.createContext({}),s=function(e){var t=r.useContext(p),n=t;return e&&(n="function"==typeof e?e(t):i(i({},t),e)),n},u=function(e){var t=s(e.components);return r.createElement(p.Provider,{value:t},e.children)},c="mdxType",g={inlineCode:"code",wrapper:function(e){var t=e.children;return 
r.createElement(r.Fragment,{},t)}},d=r.forwardRef((function(e,t){var n=e.components,o=e.mdxType,a=e.originalType,p=e.parentName,u=l(e,["components","mdxType","originalType","parentName"]),c=s(n),d=o,m=c["".concat(p,".").concat(d)]||c[d]||g[d]||a;return n?r.createElement(m,i(i({ref:t},u),{},{components:n})):r.createElement(m,i({ref:t},u))}));function m(e,t){var n=arguments,o=t&&t.mdxType;if("string"==typeof e||o){var a=n.length,i=new Array(a);i[0]=d;var l={};for(var p in t)hasOwnProperty.call(t,p)&&(l[p]=t[p]);l.originalType=e,l[c]="string"==typeof e?e:o,i[1]=l;for(var s=2;s{n.r(t),n.d(t,{assets:()=>p,contentTitle:()=>i,default:()=>g,frontMatter:()=>a,metadata:()=>l,toc:()=>s});var r=n(87462),o=(n(67294),n(3905));const a={},i="Logging",l={unversionedId:"reference/db/logging",id:"version-0.41.3/reference/db/logging",title:"Logging",description:"Platformatic DB uses a low overhead logger named Pino",source:"@site/versioned_docs/version-0.41.3/reference/db/logging.md",sourceDirName:"reference/db",slug:"/reference/db/logging",permalink:"/docs/0.41.3/reference/db/logging",draft:!1,editUrl:"https://github.com/platformatic/oss/edit/main/versioned_docs/version-0.41.3/reference/db/logging.md",tags:[],version:"0.41.3",frontMatter:{},sidebar:"docs",previous:{title:"Plugin",permalink:"/docs/0.41.3/reference/db/plugin"},next:{title:"Programmatic API",permalink:"/docs/0.41.3/reference/db/programmatic"}},p={},s=[{value:"Logger output level",id:"logger-output-level",level:2},{value:"Log formatting",id:"log-formatting",level:2}],u={toc:s},c="wrapper";function g(e){let{components:t,...n}=e;return(0,o.kt)(c,(0,r.Z)({},u,n,{components:t,mdxType:"MDXLayout"}),(0,o.kt)("h1",{id:"logging"},"Logging"),(0,o.kt)("p",null,"Platformatic DB uses a low overhead logger named ",(0,o.kt)("a",{parentName:"p",href:"https://github.com/pinojs/pino"},"Pino"),"\nto output structured log messages."),(0,o.kt)("h2",{id:"logger-output-level"},"Logger output level"),(0,o.kt)("p",null,"By default the logger 
output level is set to ",(0,o.kt)("inlineCode",{parentName:"p"},"info"),", meaning that all log messages\nwith a level of ",(0,o.kt)("inlineCode",{parentName:"p"},"info")," or above will be output by the logger. See the\n",(0,o.kt)("a",{parentName:"p",href:"https://github.com/pinojs/pino/blob/master/docs/api.md#level-string"},"Pino documentation"),"\nfor details on the supported log levels."),(0,o.kt)("p",null,"The logger output level can be overriden by adding a ",(0,o.kt)("inlineCode",{parentName:"p"},"logger")," object to the ",(0,o.kt)("inlineCode",{parentName:"p"},"server"),"\nconfiguration settings group:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-json",metastring:'title="platformatic.db.json"',title:'"platformatic.db.json"'},'{\n "server": {\n "logger": {\n "level": "error"\n },\n ...\n },\n ...\n}\n')),(0,o.kt)("h2",{id:"log-formatting"},"Log formatting"),(0,o.kt)("p",null,"If you run Platformatic DB in a terminal, where standard out (",(0,o.kt)("a",{parentName:"p",href:"https://en.wikipedia.org/wiki/Standard_streams#Standard_output_(stdout)"},"stdout"),")\nis a ",(0,o.kt)("a",{parentName:"p",href:"https://en.wikipedia.org/wiki/Tty_(Unix)"},"TTY"),":"),(0,o.kt)("ul",null,(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("a",{parentName:"li",href:"https://github.com/pinojs/pino-pretty"},"pino-pretty")," is automatically used\nto pretty print the logs and make them easier to read during development."),(0,o.kt)("li",{parentName:"ul"},"The Platformatic logo is printed (if colors are supported in the terminal emulator)")),(0,o.kt)("p",null,"Example:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},'$ npx platformatic db start\n\n\n\n\n /////////////\n ///// /////\n /// ///\n /// ///\n /// ///\n && /// /// &&\n &&&&&& /// /// &&&&&&\n &&&& /// /// &&&&\n &&& /// /// &&&&&&&&&&&&\n &&& /// /////// //// && &&&&&\n && /// /////////////// &&&\n &&& /// /// &&&\n &&& /// // &&\n &&& /// &&\n &&& /// &&&\n &&&& 
/// &&&\n &&&&& /// &&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&\n ///\n ///\n ///\n ///\n ///\n ///\n\n\n[11:20:33.466] INFO (337606): server listening\n url: "http://127.0.0.1:3042"\n\n')),(0,o.kt)("p",null,"If stdout is redirected to a non-TTY, the logo is not printed and the logs are\nformatted as newline-delimited JSON:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},'$ npx platformatic db start | head\n{"level":30,"time":1665566628973,"pid":338365,"hostname":"darkav2","url":"http://127.0.0.1:3042","msg":"server listening"}\n')))}g.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/1786350c.a5e0da32.js b/assets/js/1786350c.a5e0da32.js new file mode 100644 index 00000000000..0ffc6eb2e58 --- /dev/null +++ b/assets/js/1786350c.a5e0da32.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkplatformatic_oss_website=self.webpackChunkplatformatic_oss_website||[]).push([[92151,41037],{3905:(e,t,n)=>{n.d(t,{Zo:()=>p,kt:()=>f});var a=n(67294);function o(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function r(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);t&&(a=a.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,a)}return n}function i(e){for(var t=1;t=0||(o[n]=e[n]);return o}(e,t);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);for(a=0;a=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(o[n]=e[n])}return o}var s=a.createContext({}),c=function(e){var t=a.useContext(s),n=t;return e&&(n="function"==typeof e?e(t):i(i({},t),e)),n},p=function(e){var t=c(e.components);return a.createElement(s.Provider,{value:t},e.children)},u="mdxType",d={inlineCode:"code",wrapper:function(e){var t=e.children;return a.createElement(a.Fragment,{},t)}},m=a.forwardRef((function(e,t){var 
n=e.components,o=e.mdxType,r=e.originalType,s=e.parentName,p=l(e,["components","mdxType","originalType","parentName"]),u=c(n),m=o,f=u["".concat(s,".").concat(m)]||u[m]||d[m]||r;return n?a.createElement(f,i(i({ref:t},p),{},{components:n})):a.createElement(f,i({ref:t},p))}));function f(e,t){var n=arguments,o=t&&t.mdxType;if("string"==typeof e||o){var r=n.length,i=new Array(r);i[0]=m;var l={};for(var s in t)hasOwnProperty.call(t,s)&&(l[s]=t[s]);l.originalType=e,l[u]="string"==typeof e?e:o,i[1]=l;for(var c=2;c{n.d(t,{Z:()=>i});var a=n(67294),o=n(86010);const r={tabItem:"tabItem_Ymn6"};function i(e){let{children:t,hidden:n,className:i}=e;return a.createElement("div",{role:"tabpanel",className:(0,o.Z)(r.tabItem,i),hidden:n},t)}},74866:(e,t,n)=>{n.d(t,{Z:()=>w});var a=n(87462),o=n(67294),r=n(86010),i=n(12466),l=n(16550),s=n(91980),c=n(67392),p=n(50012);function u(e){return function(e){return o.Children.map(e,(e=>{if(!e||(0,o.isValidElement)(e)&&function(e){const{props:t}=e;return!!t&&"object"==typeof t&&"value"in t}(e))return e;throw new Error(`Docusaurus error: Bad child <${"string"==typeof e.type?e.type:e.type.name}>: all children of the component should be , and every should have a unique "value" prop.`)}))?.filter(Boolean)??[]}(e).map((e=>{let{props:{value:t,label:n,attributes:a,default:o}}=e;return{value:t,label:n,attributes:a,default:o}}))}function d(e){const{values:t,children:n}=e;return(0,o.useMemo)((()=>{const e=t??u(n);return function(e){const t=(0,c.l)(e,((e,t)=>e.value===t.value));if(t.length>0)throw new Error(`Docusaurus error: Duplicate values "${t.map((e=>e.value)).join(", ")}" found in . 
Every value needs to be unique.`)}(e),e}),[t,n])}function m(e){let{value:t,tabValues:n}=e;return n.some((e=>e.value===t))}function f(e){let{queryString:t=!1,groupId:n}=e;const a=(0,l.k6)(),r=function(e){let{queryString:t=!1,groupId:n}=e;if("string"==typeof t)return t;if(!1===t)return null;if(!0===t&&!n)throw new Error('Docusaurus error: The component groupId prop is required if queryString=true, because this value is used as the search param name. You can also provide an explicit value such as queryString="my-search-param".');return n??null}({queryString:t,groupId:n});return[(0,s._X)(r),(0,o.useCallback)((e=>{if(!r)return;const t=new URLSearchParams(a.location.search);t.set(r,e),a.replace({...a.location,search:t.toString()})}),[r,a])]}function v(e){const{defaultValue:t,queryString:n=!1,groupId:a}=e,r=d(e),[i,l]=(0,o.useState)((()=>function(e){let{defaultValue:t,tabValues:n}=e;if(0===n.length)throw new Error("Docusaurus error: the component requires at least one children component");if(t){if(!m({value:t,tabValues:n}))throw new Error(`Docusaurus error: The has a defaultValue "${t}" but none of its children has the corresponding value. Available values are: ${n.map((e=>e.value)).join(", ")}. 
If you intend to show no default tab, use defaultValue={null} instead.`);return t}const a=n.find((e=>e.default))??n[0];if(!a)throw new Error("Unexpected error: 0 tabValues");return a.value}({defaultValue:t,tabValues:r}))),[s,c]=f({queryString:n,groupId:a}),[u,v]=function(e){let{groupId:t}=e;const n=function(e){return e?`docusaurus.tab.${e}`:null}(t),[a,r]=(0,p.Nk)(n);return[a,(0,o.useCallback)((e=>{n&&r.set(e)}),[n,r])]}({groupId:a}),h=(()=>{const e=s??u;return m({value:e,tabValues:r})?e:null})();(0,o.useLayoutEffect)((()=>{h&&l(h)}),[h]);return{selectedValue:i,selectValue:(0,o.useCallback)((e=>{if(!m({value:e,tabValues:r}))throw new Error(`Can't select invalid tab value=${e}`);l(e),c(e),v(e)}),[c,v,r]),tabValues:r}}var h=n(72389);const g={tabList:"tabList__CuJ",tabItem:"tabItem_LNqP"};function y(e){let{className:t,block:n,selectedValue:l,selectValue:s,tabValues:c}=e;const p=[],{blockElementScrollPositionUntilNextRender:u}=(0,i.o5)(),d=e=>{const t=e.currentTarget,n=p.indexOf(t),a=c[n].value;a!==l&&(u(t),s(a))},m=e=>{let t=null;switch(e.key){case"Enter":d(e);break;case"ArrowRight":{const n=p.indexOf(e.currentTarget)+1;t=p[n]??p[0];break}case"ArrowLeft":{const n=p.indexOf(e.currentTarget)-1;t=p[n]??p[p.length-1];break}}t?.focus()};return o.createElement("ul",{role:"tablist","aria-orientation":"horizontal",className:(0,r.Z)("tabs",{"tabs--block":n},t)},c.map((e=>{let{value:t,label:n,attributes:i}=e;return o.createElement("li",(0,a.Z)({role:"tab",tabIndex:l===t?0:-1,"aria-selected":l===t,key:t,ref:e=>p.push(e),onKeyDown:m,onClick:d},i,{className:(0,r.Z)("tabs__item",g.tabItem,i?.className,{"tabs__item--active":l===t})}),n??t)})))}function k(e){let{lazy:t,children:n,selectedValue:a}=e;const r=(Array.isArray(n)?n:[n]).filter(Boolean);if(t){const e=r.find((e=>e.props.value===a));return e?(0,o.cloneElement)(e,{className:"margin-top--md"}):null}return 
o.createElement("div",{className:"margin-top--md"},r.map(((e,t)=>(0,o.cloneElement)(e,{key:t,hidden:e.props.value!==a}))))}function b(e){const t=v(e);return o.createElement("div",{className:(0,r.Z)("tabs-container",g.tabList)},o.createElement(y,(0,a.Z)({},e,t)),o.createElement(k,(0,a.Z)({},e,t)))}function w(e){const t=(0,h.Z)();return o.createElement(b,(0,a.Z)({key:String(t)},e))}},70647:(e,t,n)=>{n.r(t),n.d(t,{assets:()=>p,contentTitle:()=>s,default:()=>f,frontMatter:()=>l,metadata:()=>c,toc:()=>u});var a=n(87462),o=(n(67294),n(3905)),r=n(74866),i=n(85162);const l={},s=void 0,c={unversionedId:"getting-started/new-api-project-instructions",id:"getting-started/new-api-project-instructions",title:"new-api-project-instructions",description:"Run this command in your terminal to start the Platformatic creator wizard:",source:"@site/docs/getting-started/new-api-project-instructions.md",sourceDirName:"getting-started",slug:"/getting-started/new-api-project-instructions",permalink:"/docs/next/getting-started/new-api-project-instructions",draft:!1,editUrl:"https://github.com/platformatic/platformatic/edit/main/docs/getting-started/new-api-project-instructions.md",tags:[],version:"current",frontMatter:{}},p={},u=[],d={toc:u},m="wrapper";function f(e){let{components:t,...n}=e;return(0,o.kt)(m,(0,a.Z)({},d,n,{components:t,mdxType:"MDXLayout"}),(0,o.kt)("p",null,"Run this command in your terminal to start the Platformatic creator wizard:"),(0,o.kt)(r.Z,{groupId:"package-manager-create",mdxType:"Tabs"},(0,o.kt)(i.Z,{value:"npm",label:"npm",mdxType:"TabItem"},(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"npm create platformatic@latest\n"))),(0,o.kt)(i.Z,{value:"yarn",label:"yarn",mdxType:"TabItem"},(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"yarn create 
platformatic\n"))),(0,o.kt)(i.Z,{value:"pnpm",label:"pnpm",mdxType:"TabItem"},(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"pnpm create platformatic@latest\n")))),(0,o.kt)("p",null,"This interactive command-line tool will ask you some questions about how you'd\nlike to set up your new Platformatic project. For this guide, select these options:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre"},"- Which kind of project do you want to create? => DB\n- Where would you like to create your project? => quick-start\n- Do you want to create default migrations? => Yes\n- Do you want to create a plugin? => Yes\n- Do you want to use TypeScript? => No\n- Do you want to install dependencies? => Yes (this can take a while)\n- Do you want to apply the migrations? => Yes\n- Do you want to generate types? => Yes\n- Do you want to create the github action to deploy this application to Platformatic Cloud dynamic workspace? => No\n- Do you want to create the github action to deploy this application to Platformatic Cloud static workspace? 
=> No\n")),(0,o.kt)("p",null,"Once the wizard is complete, you'll have a Platformatic app project in the\nfolder ",(0,o.kt)("inlineCode",{parentName:"p"},"quick-start"),", with example migration files and a plugin script."),(0,o.kt)("admonition",{type:"info"},(0,o.kt)("p",{parentName:"admonition"},"Make sure you run the npm/yarn/pnpm command ",(0,o.kt)("inlineCode",{parentName:"p"},"install")," command manually if you\ndon't ask the wizard to do it for you.")))}f.isMDXComponent=!0},96947:(e,t,n)=>{n.r(t),n.d(t,{assets:()=>u,contentTitle:()=>c,default:()=>v,frontMatter:()=>s,metadata:()=>p,toc:()=>d});var a=n(87462),o=(n(67294),n(3905)),r=n(74866),i=n(85162),l=n(70647);const s={},c="Generate Front-end Code to Consume Platformatic REST API",p={unversionedId:"guides/generate-frontend-code-to-consume-platformatic-rest-api",id:"guides/generate-frontend-code-to-consume-platformatic-rest-api",title:"Generate Front-end Code to Consume Platformatic REST API",description:"By default, a Platformatic app exposes REST API that provide CRUD (Create, Read,",source:"@site/docs/guides/generate-frontend-code-to-consume-platformatic-rest-api.md",sourceDirName:"guides",slug:"/guides/generate-frontend-code-to-consume-platformatic-rest-api",permalink:"/docs/next/guides/generate-frontend-code-to-consume-platformatic-rest-api",draft:!1,editUrl:"https://github.com/platformatic/platformatic/edit/main/docs/guides/generate-frontend-code-to-consume-platformatic-rest-api.md",tags:[],version:"current",frontMatter:{},sidebar:"docs",previous:{title:"Integrate Prisma with Platformatic DB",permalink:"/docs/next/guides/prisma"},next:{title:"Migrating a Fastify app to Platformatic Service",permalink:"/docs/next/guides/migrating-fastify-app-to-platformatic-service"}},u={},d=[{value:"Create a new Platformatic app",id:"create-a-new-platformatic-app",level:2},{value:"Configure the new Platformatic app",id:"configure-the-new-platformatic-app",level:2},{value:"Create a new Front-end 
Application",id:"create-a-new-front-end-application",level:2},{value:"Generate the front-end code to consume the Platformatic app REST API",id:"generate-the-front-end-code-to-consume-the-platformatic-app-rest-api",level:2},{value:"React and Vue.js components that read, create, and update an entity",id:"react-and-vuejs-components-that-read-create-and-update-an-entity",level:2},{value:"Import the new component in your front-end application",id:"import-the-new-component-in-your-front-end-application",level:2},{value:"Have fun",id:"have-fun",level:2}],m={toc:d},f="wrapper";function v(e){let{components:t,...s}=e;return(0,o.kt)(f,(0,a.Z)({},m,s,{components:t,mdxType:"MDXLayout"}),(0,o.kt)("h1",{id:"generate-front-end-code-to-consume-platformatic-rest-api"},"Generate Front-end Code to Consume Platformatic REST API"),(0,o.kt)("p",null,"By default, a Platformatic app exposes REST API that provide CRUD (Create, Read,\nUpdate, Delete) functionality for each entity (see the\n",(0,o.kt)("a",{parentName:"p",href:"https://docs.platformatic.dev/docs/reference/sql-openapi/introduction"},"Introduction to the REST API"),"\ndocumentation for more information on the REST API)."),(0,o.kt)("p",null,"Platformatic CLI allows to auto-generate the front-end code to import in your\nfront-end application to consume the Platformatic REST API."),(0,o.kt)("p",null,"This guide"),(0,o.kt)("ul",null,(0,o.kt)("li",{parentName:"ul"},"Explains how to create a new Platformatic app."),(0,o.kt)("li",{parentName:"ul"},"Explains how to configure the new Platformatic app."),(0,o.kt)("li",{parentName:"ul"},"Explains how to create a new React or Vue.js front-end application."),(0,o.kt)("li",{parentName:"ul"},"Explains how to generate the front-end TypeScript code to consume the Platformatic app REST API."),(0,o.kt)("li",{parentName:"ul"},"Provide some React and Vue.js components (either of them written in TypeScript) that read, create, and update an entity."),(0,o.kt)("li",{parentName:"ul"},"Explains how to 
import the new component in your front-end application.")),(0,o.kt)("h2",{id:"create-a-new-platformatic-app"},"Create a new Platformatic app"),(0,o.kt)(l.default,{mdxType:"NewApiProjectInstructions"}),(0,o.kt)("h2",{id:"configure-the-new-platformatic-app"},"Configure the new Platformatic app"),(0,o.kt)("p",null,'documentation to create a new Platformatic app. Every Platformatic app uses the "Movie" demo entity and includes\nthe corresponding table, migrations, and REST API to create, read, update, and delete movies.'),(0,o.kt)("p",null,"Once the new Platformatic app is ready:"),(0,o.kt)("ul",null,(0,o.kt)("li",{parentName:"ul"},"Set up CORS in ",(0,o.kt)("inlineCode",{parentName:"li"},"platformatic.db.json"))),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-diff"},'{\n "$schema": "https://platformatic.dev/schemas/v0.24.0/db",\n "server": {\n "hostname": "{PLT_SERVER_HOSTNAME}",\n "port": "{PORT}",\n "logger": {\n "level": "{PLT_SERVER_LOGGER_LEVEL}"\n },\n+ "cors": {\n+ "origin": {\n+ "regexp": "/*/"\n+ }\n+ }\n },\n ...\n}\n')),(0,o.kt)("p",null," You can find more details about the cors configuration ",(0,o.kt)("a",{parentName:"p",href:"https://docs.platformatic.dev/docs/guides/generate-frontend-code-to-consume-platformatic-rest-api"},"here"),"."),(0,o.kt)("ul",null,(0,o.kt)("li",{parentName:"ul"},"launch Platformatic through ",(0,o.kt)("inlineCode",{parentName:"li"},"npm start"),".\nThen, the Platformatic app should be available at the ",(0,o.kt)("inlineCode",{parentName:"li"},"http://127.0.0.1:3042/")," URL.")),(0,o.kt)("h2",{id:"create-a-new-front-end-application"},"Create a new Front-end Application"),(0,o.kt)("p",null,"Refer to the ",(0,o.kt)("a",{parentName:"p",href:"https://vitejs.dev/guide/#scaffolding-your-first-vite-project"},"Scaffolding Your First Vite Project"),'\ndocumentation to create a new front-end application, and call it 
"rest-api-frontend".'),(0,o.kt)("admonition",{type:"info"},(0,o.kt)("p",{parentName:"admonition"},"Please note Vite is suggested only for practical reasons, but the bundler of choice does not make any difference.")),(0,o.kt)("p",null,"If you are using npm 7+ you should run"),(0,o.kt)(r.Z,{groupId:"import-new-component",mdxType:"Tabs"},(0,o.kt)(i.Z,{value:"react",label:"React",mdxType:"TabItem"},(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"npm create vite@latest rest-api-frontend -- --template react-ts\n"))),(0,o.kt)(i.Z,{value:"vue",label:"Vue.js",mdxType:"TabItem"},(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"npm create vite@latest rest-api-frontend -- --template vue-ts\n")))),(0,o.kt)("p",null,"and then follow the Vite's instructions"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"Scaffolding project in /Users/noriste/Sites/temp/platformatic/rest-api-frontend...\n\nDone. Now run:\n\n cd rest-api-frontend\n npm install\n npm run dev\n")),(0,o.kt)("p",null,"Once done, the front-end application is available at ",(0,o.kt)("inlineCode",{parentName:"p"},"http://localhost:5174/"),"."),(0,o.kt)("h2",{id:"generate-the-front-end-code-to-consume-the-platformatic-app-rest-api"},"Generate the front-end code to consume the Platformatic app REST API"),(0,o.kt)("p",null,"Now that either the Platformatic app and the front-end app are running, go to the front-end codebase and run the Platformatic CLI"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"cd rest-api-frontend/src\nnpx platformatic frontend http://127.0.0.1:3042 ts\n")),(0,o.kt)("p",null,"Refer to the ",(0,o.kt)("a",{parentName:"p",href:"https://docs.platformatic.dev/docs/reference/cli#frontend"},"Platformatic CLI frontend command"),"\ndocumentation to know about the available options."),(0,o.kt)("p",null,"The Platformatic CLI 
generates"),(0,o.kt)("ul",null,(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("inlineCode",{parentName:"li"},"api.d.ts"),": A TypeScript module that includes all the OpenAPI-related types.\nHere is part of the generated code")),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-ts"},"interface GetMoviesRequest {\n 'limit'?: number;\n 'offset'?: number;\n // ... etc.\n}\n\ninterface GetMoviesResponseOK {\n 'id'?: number;\n 'title': string;\n}\n\n\n// ... etc.\n\nexport interface Api {\n setBaseUrl(baseUrl: string): void;\n getMovies(req: GetMoviesRequest): Promise>;\n createMovie(req: CreateMovieRequest): Promise;\n // ... etc.\n}\n")),(0,o.kt)("ul",null,(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("inlineCode",{parentName:"li"},"api.ts"),": A TypeScript module that includes a typed function for every single OpenAPI endpoint.\nHere is part of the generated code")),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-ts"},"import type { Api } from './api-types'\n\nlet baseUrl = ''\nexport function setBaseUrl(newUrl: string) { baseUrl = newUrl };\n\nexport const createMovie: Api['createMovie'] = async (request) => {\n const response = await fetch(`${baseUrl}/movies/`, {\n method:'post',\n body: JSON.stringify(request),\n headers: {\n 'Content-Type': 'application/json'\n }\n })\n\n if (!response.ok) {\n throw new Error(await response.text())\n }\n\n return await response.json()\n}\n\n// etc.\n\n")),(0,o.kt)("p",null,"You can add a ",(0,o.kt)("inlineCode",{parentName:"p"},"--name")," option to the command line to provide a custom name for the generated files."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"cd rest-api-frontend/src\nnpx platformatic frontend --name foobar http://127.0.0.1:3042 ts\n")),(0,o.kt)("p",null,"will generated ",(0,o.kt)("inlineCode",{parentName:"p"},"foobar.ts")," and 
",(0,o.kt)("inlineCode",{parentName:"p"},"foobar-types.d.ts")),(0,o.kt)("h2",{id:"react-and-vuejs-components-that-read-create-and-update-an-entity"},"React and Vue.js components that read, create, and update an entity"),(0,o.kt)("p",null,"You can copy/paste the following React or Vue.js components that import the code\nthe Platformatic CLI generated."),(0,o.kt)(r.Z,{groupId:"import-new-component",mdxType:"Tabs"},(0,o.kt)(i.Z,{value:"react",label:"React",mdxType:"TabItem"},(0,o.kt)("p",null,"Create a new file ",(0,o.kt)("inlineCode",{parentName:"p"},"src/PlatformaticPlayground.tsx")," and copy/paste the following code."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-tsx"},"import { useEffect, useState } from 'react'\n\n// getMovies, createMovie, and updateMovie are all functions automatically generated by Platformatic\n// in the `api.ts` module.\nimport { getMovies, createMovie, updateMovie, setBaseUrl } from './api'\n\nsetBaseUrl('http://127.0.0.1:3042') // configure this according to your needs\n\nexport function PlatformaticPlayground() {\n const [movies, setMovies] = useState>>([])\n const [newMovie, setNewMovie] = useState>>()\n\n async function onCreateMovie() {\n const newMovie = await createMovie({ title: 'Harry Potter' })\n setNewMovie(newMovie)\n }\n\n async function onUpdateMovie() {\n if (!newMovie || !newMovie.id) return\n\n const updatedMovie = await updateMovie({ id: newMovie.id, title: 'The Lord of the Rings' })\n setNewMovie(updatedMovie)\n }\n\n useEffect(() => {\n async function fetchMovies() {\n const movies = await getMovies({})\n setMovies(movies)\n }\n\n fetchMovies()\n }, [])\n\n return (\n <>\n

Movies

\n\n {movies.length === 0 ? (\n
No movies yet
\n ) : (\n
    \n {movies.map((movie) => (\n
  • {movie.title}
  • \n ))}\n
\n )}\n\n \n \n\n {newMovie &&
Title: {newMovie.title}
}\n \n )\n}\n"))),(0,o.kt)(i.Z,{value:"vue",label:"Vue.js",mdxType:"TabItem"},(0,o.kt)("p",null,"Create a new file ",(0,o.kt)("inlineCode",{parentName:"p"},"src/PlatformaticPlayground.vue")," and copy/paste the following code."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-vue"},' + + + + \ No newline at end of file diff --git a/blog/atom.xml b/blog/atom.xml new file mode 100644 index 00000000000..41c12b4b9c0 --- /dev/null +++ b/blog/atom.xml @@ -0,0 +1,22 @@ + + + https://docs.platformatic.dev/blog + Platformatic Open Source Software Blog + 2022-08-22T00:00:00.000Z + https://github.com/jpmonette/feed + + Platformatic Open Source Software Blog + https://docs.platformatic.dev/img/favicon.ico + + <![CDATA[Coming Soon]]> + https://docs.platformatic.dev/blog/coming-soon + + 2022-08-22T00:00:00.000Z + + Welcome to platformatic!

We are working hard to launch platformatic, stay tuned!

]]>
+ + Matteo Collina + https://github.com/mcollina + +
+
\ No newline at end of file diff --git a/blog/coming-soon/index.html b/blog/coming-soon/index.html new file mode 100644 index 00000000000..f4293f0d947 --- /dev/null +++ b/blog/coming-soon/index.html @@ -0,0 +1,17 @@ + + + + + +Coming Soon | Platformatic Open Source Software + + + + + +
+

Coming Soon

· One min read
Matteo Collina

Welcome to platformatic!

We are working hard to launch platformatic, stay tuned!

+ + + + \ No newline at end of file diff --git a/blog/index.html b/blog/index.html new file mode 100644 index 00000000000..ed9473cadf7 --- /dev/null +++ b/blog/index.html @@ -0,0 +1,17 @@ + + + + + +Blog | Platformatic Open Source Software + + + + + +
+

· One min read
Matteo Collina

Welcome to platformatic!

We are working hard to launch platformatic, stay tuned!

+ + + + \ No newline at end of file diff --git a/blog/rss.xml b/blog/rss.xml new file mode 100644 index 00000000000..6cbd45db09b --- /dev/null +++ b/blog/rss.xml @@ -0,0 +1,20 @@ + + + + Platformatic Open Source Software Blog + https://docs.platformatic.dev/blog + Platformatic Open Source Software Blog + Mon, 22 Aug 2022 00:00:00 GMT + https://validator.w3.org/feed/docs/rss2.html + https://github.com/jpmonette/feed + en + + <![CDATA[Coming Soon]]> + https://docs.platformatic.dev/blog/coming-soon + https://docs.platformatic.dev/blog/coming-soon + Mon, 22 Aug 2022 00:00:00 GMT + + Welcome to platformatic!

We are working hard to launch platformatic, stay tuned!

]]>
+
+
+
\ No newline at end of file diff --git a/docs/0.41.1/category/getting-started/index.html b/docs/0.41.1/category/getting-started/index.html new file mode 100644 index 00000000000..4333f76771e --- /dev/null +++ b/docs/0.41.1/category/getting-started/index.html @@ -0,0 +1,17 @@ + + + + + +Getting Started | Platformatic Open Source Software + + + + + + + + + + \ No newline at end of file diff --git a/docs/0.41.1/category/guides/index.html b/docs/0.41.1/category/guides/index.html new file mode 100644 index 00000000000..6b22a9ab779 --- /dev/null +++ b/docs/0.41.1/category/guides/index.html @@ -0,0 +1,17 @@ + + + + + +Guides | Platformatic Open Source Software + + + + + +
+
Version: 0.41.1

Guides

+ + + + \ No newline at end of file diff --git a/docs/0.41.1/category/packages/index.html b/docs/0.41.1/category/packages/index.html new file mode 100644 index 00000000000..a6b77e59874 --- /dev/null +++ b/docs/0.41.1/category/packages/index.html @@ -0,0 +1,17 @@ + + + + + +Packages | Platformatic Open Source Software + + + + + + + + + + \ No newline at end of file diff --git a/docs/0.41.1/category/platformatic-cloud/index.html b/docs/0.41.1/category/platformatic-cloud/index.html new file mode 100644 index 00000000000..90c2daa2fca --- /dev/null +++ b/docs/0.41.1/category/platformatic-cloud/index.html @@ -0,0 +1,17 @@ + + + + + +Platformatic Cloud | Platformatic Open Source Software + + + + + + + + + + \ No newline at end of file diff --git a/docs/0.41.1/category/reference/index.html b/docs/0.41.1/category/reference/index.html new file mode 100644 index 00000000000..b1439a721de --- /dev/null +++ b/docs/0.41.1/category/reference/index.html @@ -0,0 +1,17 @@ + + + + + +Reference | Platformatic Open Source Software + + + + + + + + + + \ No newline at end of file diff --git a/docs/0.41.1/contributing/documentation-style-guide/index.html b/docs/0.41.1/contributing/documentation-style-guide/index.html new file mode 100644 index 00000000000..0f85ed36110 --- /dev/null +++ b/docs/0.41.1/contributing/documentation-style-guide/index.html @@ -0,0 +1,74 @@ + + + + + +Documentation Style Guide | Platformatic Open Source Software + + + + + +
+
Version: 0.41.1

Documentation Style Guide

Welcome to the Platformatic Documentation Style Guide. This guide is here to provide +you with a conventional writing style for users writing developer documentation on +our Open Source framework. Each topic is precise and well explained to help you write +documentation users can easily understand and implement.

Who is This Guide For?

This guide is for anyone who loves to build with Platformatic or wants to contribute +to our documentation. You do not need to be an expert in writing technical +documentation. This guide is here to help you.

Visit CONTRIBUTING.md +file on GitHub to join our Open Source folks.

Before you Write

You should have a basic understanding of:

  • JavaScript
  • Node.js
  • Git
  • GitHub
  • Markdown
  • HTTP
  • NPM

Consider Your Audience

Before you start writing, think about your audience. In this case, your audience +should already know HTTP, JavaScript, NPM, and Node.js. It is necessary to keep +your readers in mind because they are the ones consuming your content. You want +to give as much useful information as possible. Consider the vital things they +need to know and how they can understand them. Use words and references that +readers can relate to easily. Ask for feedback from the community, it can help +you write better documentation that focuses on the user and what you want to +achieve.

Get Straight to the Point

Give your readers a clear and precise action to take. Start with what is most +important. This way, you can help them find what they need faster. Mostly, +readers tend to read the first content on a page, and many will not scroll +further.

Example

Less like this:

Colons are very important to register a parametric path. It lets +the framework know there is a new parameter created. You can place the colon +before the parameter name so the parametric path can be created.

More Like this:

To register a parametric path, put a colon before the parameter +name. Using a colon lets the framework know it is a parametric path and not a +static path.

Images and Video Should Enhance the Written Documentation

Images and video should only be added if they complement the written +documentation, for example to help the reader form a clearer mental model of a +concept or pattern.

Images can be directly embedded, but videos should be included by linking to an +external site, such as YouTube. You can add links by using +[Title](https://www.websitename.com) in the Markdown.

Avoid Plagiarism

Make sure you avoid copying other people's work. Keep it as original as +possible. You can learn from what they have done and reference where it is from +if you used a particular quote from their work.

Word Choice

There are a few things you need to use and avoid when writing your documentation +to improve readability for readers and make documentation neat, direct, and +clean.

When to use the Second Person "you" as the Pronoun

When writing articles or guides, your content should communicate directly to +readers in the second person ("you") addressed form. It is easier to give them +direct instruction on what to do on a particular topic. To see an example, visit +the Quick Start Guide.

Example

Less like this:

We can use the following plugins.

More like this:

You can use the following plugins.

According to Wikipedia, You is usually a second person pronoun. +Also, used to refer to an indeterminate person, as a more common alternative +to a very formal indefinite pronoun.

To recap, use "you" when writing articles or guides.

When to Avoid the Second Person "you" as the Pronoun

One of the main rules of formal writing such as reference documentation, or API +documentation, is to avoid the second person ("you") or directly addressing the +reader.

Example

Less like this:

You can use the following recommendation as an example.

More like this:

As an example, the following recommendations should be +referenced.

To view a live example, refer to the Decorators +reference document.

To recap, avoid "you" in reference documentation or API documentation.

Avoid Using Contractions

Contractions are the shortened version of written and spoken forms of a word, +i.e. using "don't" instead of "do not". Avoid contractions to provide a more +formal tone.

Avoid Using Condescending Terms

Condescending terms are words that include:

  • Just
  • Easy
  • Simply
  • Basically
  • Obviously

The reader may not find it easy to use Platformatic; avoid +words that make it sound simple, easy, offensive, or insensitive. Not everyone +who reads the documentation has the same level of understanding.

Starting With a Verb

Mostly start your description with a verb, which makes it simple and precise for +the reader to follow. Prefer using present tense because it is easier to read +and understand than the past or future tense.

Example

Less like this:

There is a need for Node.js to be installed before you can be +able to use Platformatic.

More like this:

Install Node.js to make use of Platformatic.

Grammatical Moods

Grammatical moods are a great way to express your writing. Avoid sounding too +bossy while making a direct statement. Know when to switch between indicative, +imperative, and subjunctive moods.

Indicative - Use when making a factual statement or question.

Example

Since there is no testing framework available, "Platformatic recommends ways +to write tests".

Imperative - Use when giving instructions, actions, commands, or when you +write your headings.

Example

Install dependencies before starting development.

Subjunctive - Use when making suggestions, hypotheses, or non-factual +statements.

Example

Reading the documentation on our website is recommended to get +comprehensive knowledge of the framework.

Use Active Voice Instead of Passive

Using active voice is a more compact and direct way of conveying your +documentation.

Example

Passive:

The node dependencies and packages are installed by npm.

Active:

npm installs packages and node dependencies.

Writing Style

Documentation Titles

When creating a new guide, API, or reference in the /docs/ directory, use +short titles that best describe the topic of your documentation. Name your files +in kebab-case and avoid Raw or camelCase. To learn more about kebab-case you +can visit this medium article on Case +Styles.

Examples:

hook-and-plugins.md

adding-test-plugins.md

removing-requests.md

Hyperlinks should have a clear title describing what they reference. Here is how your +hyperlink should look:

<!-- More like this -->

// Add clear & brief description
[Fastify Plugins] (https://www.fastify.io/docs/latest/Plugins/)

<!--Less like this -->

// incomplete description
[Fastify] (https://www.fastify.io/docs/latest/Plugins/)

// Adding title in link brackets
[](https://www.fastify.io/docs/latest/Plugins/ "fastify plugin")

// Empty title
[](https://www.fastify.io/docs/latest/Plugins/)

// Adding links localhost URLs instead of using code strings (``)
[http://localhost:3000/](http://localhost:3000/)

Include in your documentation as many essential references as possible, but +avoid having numerous links when writing to avoid distractions.

+ + + + \ No newline at end of file diff --git a/docs/0.41.1/contributing/index.html b/docs/0.41.1/contributing/index.html new file mode 100644 index 00000000000..2c8eb11546c --- /dev/null +++ b/docs/0.41.1/contributing/index.html @@ -0,0 +1,18 @@ + + + + + +Contributing | Platformatic Open Source Software + + + + + +
+
+ + + + \ No newline at end of file diff --git a/docs/0.41.1/getting-started/architecture/index.html b/docs/0.41.1/getting-started/architecture/index.html new file mode 100644 index 00000000000..8174cf53241 --- /dev/null +++ b/docs/0.41.1/getting-started/architecture/index.html @@ -0,0 +1,25 @@ + + + + + +Architecture | Platformatic Open Source Software + + + + + +
+
Version: 0.41.1

Architecture

Platformatic is a collection of Open Source tools designed to eliminate friction +in backend development. The first of those tools is Platformatic DB, which is developed +as @platformatic/db.

Platformatic DB

Platformatic DB can expose a SQL database by dynamically mapping it to REST/OpenAPI +and GraphQL endpoints. It supports a limited subset of the SQL query language, but +also allows developers to add their own custom routes and resolvers.

Platformatic DB Architecture

Platformatic DB is composed of a few key libraries:

  1. @platformatic/sql-mapper - follows the Data Mapper pattern to build an API on top of a SQL database. +Internally it uses the @database project.
  2. @platformatic/sql-openapi - uses sql-mapper to create a series of REST routes and matching OpenAPI definitions. +Internally it uses @fastify/swagger.
  3. @platformatic/sql-graphql - uses sql-mapper to create a GraphQL endpoint and schema. sql-graphql also supports Federation. +Internally it uses mercurius.

Platformatic DB allows you to load a Fastify plugin during server startup that contains your own application-specific code. +The plugin can add more routes or resolvers — these will automatically be shown in the OpenAPI and GraphQL schemas.

SQL database migrations are also supported. They're implemented internally with the postgrator library.

+ + + + \ No newline at end of file diff --git a/docs/0.41.1/getting-started/movie-quotes-app-tutorial/index.html b/docs/0.41.1/getting-started/movie-quotes-app-tutorial/index.html new file mode 100644 index 00000000000..61368f1dc66 --- /dev/null +++ b/docs/0.41.1/getting-started/movie-quotes-app-tutorial/index.html @@ -0,0 +1,129 @@ + + + + + +Movie Quotes App Tutorial | Platformatic Open Source Software + + + + + +
+
Version: 0.41.1

Movie Quotes App Tutorial

This tutorial will help you learn how to build a full stack application on top +of Platformatic DB. We're going to build an application that allows us to +save our favourite movie quotes. We'll also be building in custom API functionality +that allows for some neat user interaction on our frontend.

You can find the complete code for the application that we're going to build +on GitHub.

note

We'll be building the frontend of our application with the Astro +framework, but the GraphQL API integration steps that we're going to cover can +be applied with most frontend frameworks.

What we're going to cover

In this tutorial we'll learn how to:

  • Create a Platformatic API
  • Apply database migrations
  • Create relationships between our API entities
  • Populate our database tables
  • Build a frontend application that integrates with our GraphQL API
  • Extend our API with custom functionality
  • Enable CORS on our Platformatic API

Prerequisites

To follow along with this tutorial you'll need to have these things installed:

You'll also need to have some experience with JavaScript, and be comfortable with +running commands in a terminal.

Build the backend

Create a Platformatic API

First, let's create our project directory:

mkdir -p tutorial-movie-quotes-app/apps/movie-quotes-api/

cd tutorial-movie-quotes-app/apps/movie-quotes-api/

Run this command in your terminal to start the Platformatic creator wizard:

npm create platformatic@latest

This interactive command-line tool will ask you some questions about how you'd +like to set up your new Platformatic project. For this guide, select these options:

- Which kind of project do you want to create?  => DB
- Where would you like to create your project? => quick-start
- Do you want to create default migrations? => Yes
- Do you want to create a plugin? => Yes
- Do you want to use TypeScript? => No
- Do you want to install dependencies? => Yes (this can take a while)
- Do you want to apply the migrations? => Yes
- Do you want to generate types? => Yes
- Do you want to create the github action to deploy this application to Platformatic Cloud dynamic workspace? => No
- Do you want to create the github action to deploy this application to Platformatic Cloud static workspace? => No

Once the wizard is complete, you'll have a Platformatic app project in the +folder quick-start, with example migration files and a plugin script.

info

Make sure you run the npm/yarn/pnpm install command manually if you +don't ask the wizard to do it for you.

Define the database schema

Let's create a new directory to store our migration files:

mkdir migrations

Then we'll create a migration file named 001.do.sql in the migrations +directory:

CREATE TABLE quotes (
id INTEGER PRIMARY KEY,
quote TEXT NOT NULL,
said_by VARCHAR(255) NOT NULL,
created_at DATETIME DEFAULT CURRENT_TIMESTAMP
);

Now let's setup migrations in our Platformatic configuration +file, platformatic.db.json:

{
"$schema": "https://platformatic.dev/schemas/v0.23.2/db",
"server": {
"hostname": "{PLT_SERVER_HOSTNAME}",
"port": "{PORT}",
"logger": {
"level": "{PLT_SERVER_LOGGER_LEVEL}"
}
},
"db": {
"connectionString": "{DATABASE_URL}",
"graphql": true,
"openapi": true
},
"plugins": {
"paths": [
"plugin.js"
]
},
"types": {
"autogenerate": true
},
"migrations": {
"dir": "migrations",
"autoApply": true
}
}
info

Take a look at the Configuration reference +to see all the supported configuration settings.

Now we can start the Platformatic DB server:

npm run start

Our Platformatic DB server should start, and we'll see messages like these:

[11:26:48.772] INFO (15235): running 001.do.sql
[11:26:48.864] INFO (15235): server listening
url: "http://127.0.0.1:3042"

Let's open a new terminal and make a request to our server's REST API that +creates a new quote:

curl --request POST --header "Content-Type: application/json" \
-d "{ \"quote\": \"Toto, I've got a feeling we're not in Kansas anymore.\", \"saidBy\": \"Dorothy Gale\" }" \
http://localhost:3042/quotes

We should receive a response like this from the API:

{"id":1,"quote":"Toto, I've got a feeling we're not in Kansas anymore.","saidBy":"Dorothy Gale","createdAt":"1684167422600"}

Create an entity relationship

Now let's create a migration file named 002.do.sql in the migrations +directory:

CREATE TABLE movies (
id INTEGER PRIMARY KEY,
name TEXT NOT NULL UNIQUE
);

ALTER TABLE quotes ADD COLUMN movie_id INTEGER REFERENCES movies(id);

This SQL will create a new movies database table and also add a movie_id +column to the quotes table. This will allow us to store movie data in the +movies table and then reference them by ID in our quotes table.

Let's stop the Platformatic DB server with Ctrl + C, and then start it again:

npm run start

The new migration should be automatically applied and we'll see the log message +running 002.do.sql.

Our Platformatic DB server also provides a GraphQL API. Let's open up the GraphiQL +application in our web browser:

http://localhost:3042/graphiql

Now let's run this query with GraphiQL to add the movie for the quote that we +added earlier:

mutation {
saveMovie(input: { name: "The Wizard of Oz" }) {
id
}
}

We should receive a response like this from the API:

{
"data": {
"saveMovie": {
"id": "1"
}
}
}

Now we can update our quote to reference the movie:

mutation {
saveQuote(input: { id: 1, movieId: 1 }) {
id
quote
saidBy
createdAt
movie {
id
name
}
}
}

We should receive a response like this from the API:

{
"data": {
"saveQuote": {
"id": "1",
"quote": "Toto, I've got a feeling we're not in Kansas anymore.",
"saidBy": "Dorothy Gale",
"movie": {
"id": "1",
"name": "The Wizard of Oz"
}
}
}
}

Our Platformatic DB server has automatically identified the relationship +between our quotes and movies database tables. This allows us to make +GraphQL queries that retrieve quotes and their associated movies at the same +time. For example, to retrieve all quotes from our database we can run:

query {
quotes {
id
quote
saidBy
createdAt
movie {
id
name
}
}
}

To view the GraphQL schema that's generated for our API by Platformatic DB, +we can run this command in our terminal:

npx platformatic db schema graphql

The GraphQL schema shows all of the queries and mutations that we can run +against our GraphQL API, as well as the types of data that it expects as input.

Populate the database

Our movie quotes database is looking a little empty! We're going to create a +"seed" script to populate it with some data.

Let's create a new file named seed.js and copy and paste in this code:

'use strict'

const quotes = [
{
quote: "Toto, I've got a feeling we're not in Kansas anymore.",
saidBy: 'Dorothy Gale',
movie: 'The Wizard of Oz'
},
{
quote: "You're gonna need a bigger boat.",
saidBy: 'Martin Brody',
movie: 'Jaws'
},
{
quote: 'May the Force be with you.',
saidBy: 'Han Solo',
movie: 'Star Wars'
},
{
quote: 'I have always depended on the kindness of strangers.',
saidBy: 'Blanche DuBois',
movie: 'A Streetcar Named Desire'
}
]

module.exports = async function ({ entities, db, sql }) {
for (const values of quotes) {
const movie = await entities.movie.save({ input: { name: values.movie } })

console.log('Created movie:', movie)

const quote = {
quote: values.quote,
saidBy: values.saidBy,
movieId: movie.id
}

await entities.quote.save({ input: quote })

console.log('Created quote:', quote)
}
}
info

Take a look at the Seed a Database guide to learn more about how database seeding works with Platformatic DB.

Let's stop our Platformatic DB server running and remove our SQLite database:

rm db.sqlite

Now let's create a fresh SQLite database by running our migrations:

npx platformatic db migrations apply

And then let's populate the quotes and movies tables with data using our seed script:

npx platformatic db seed seed.js

Our database is full of data, but we don't have anywhere to display it. It's time to start building our frontend!

Build the frontend

We're now going to use Astro to build our frontend application. If you've not used it before, you might find it helpful to read this overview on how Astro components are structured.

tip

Astro provides some extensions and tools to help improve your Editor Setup when building an Astro application.

Create an Astro application

In the root tutorial-movie-quotes-app directory of our project, let's create a new directory for our frontend application:

mkdir -p apps/movie-quotes-frontend/

cd apps/movie-quotes-frontend/

And then we'll create a new Astro project:

npm create astro@latest -- --template basics

It will ask you some questions about how you'd like to set up your new Astro project. For this guide, select these options:

Where should we create your new project?

   .
◼ tmpl Using basics as project template
✔ Template copied

Install dependencies? (it's buggy, we'll do it afterwards)

   No
◼ No problem! Remember to install dependencies after setup.

Do you plan to write TypeScript?

   No
◼ No worries! TypeScript is supported in Astro by default, but you are free to continue writing JavaScript instead.

Initialize a new git repository?

   No
◼ Sounds good! You can always run git init manually.

Liftoff confirmed. Explore your project!
Run npm dev to start the dev server. CTRL+C to stop.
Add frameworks like react or tailwind using astro add.

Now we'll edit our Astro configuration file, astro.config.mjs, and copy and paste in this code:

import { defineConfig } from 'astro/config'

// https://astro.build/config
export default defineConfig({
output: 'server'
})

And we'll also edit our tsconfig.json file and add in this configuration:

{
"extends": "astro/tsconfigs/base",
"compilerOptions": {
"types": ["astro/client"]
}
}

Now we can start up the Astro development server with:

npm run dev

And then load up the frontend in our browser at http://localhost:3000

Now that everything is working, we'll remove all default *.astro files from the src/ directory, but we'll keep the directory structure. You can delete them now, or override them later.

Create a layout

In the src/layouts directory, let's create a new file named Layout.astro:

---
export interface Props {
title: string;
page?: string;
}
const { title, page } = Astro.props;
---

<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8" />
<meta name="viewport" content="width=device-width" />
<title>{title}</title>
</head>
<body>
<header>
<h1>🎬 Movie Quotes</h1>
</header>
<nav>
<a href="/">All quotes</a>
</nav>
<section>
<slot />
</section>
</body>
</html>

The code between the --- is known as the component script, and the code after that is the component template. The component script will only run on the server side when a web browser makes a request. The component template is rendered server side and sent back as an HTML response to the web browser.

Now we'll update src/pages/index.astro to use this Layout component. Let's replace the contents of src/pages/index.astro with this code:

---
import Layout from '../layouts/Layout.astro';
---

<Layout title="All quotes" page="listing">
<main>
<p>We'll list all the movie quotes here.</p>
</main>
</Layout>

Integrate the urql GraphQL client

We're now going to integrate the URQL GraphQL client into our frontend application. This will allow us to run queries and mutations against our Platformatic GraphQL API.

Let's first install @urql/core and graphql as project dependencies:

npm install @urql/core graphql

Then let's create a new .env file and add this configuration:

PUBLIC_GRAPHQL_API_ENDPOINT=http://127.0.0.1:3042/graphql

Now we'll create a new directory:

mkdir src/lib

And then create a new file named src/lib/quotes-api.js. In that file we'll create a new URQL client:

// src/lib/quotes-api.js

import { createClient, cacheExchange, fetchExchange } from '@urql/core';

const graphqlClient = createClient({
url: import.meta.env.PUBLIC_GRAPHQL_API_ENDPOINT,
requestPolicy: "network-only",
exchanges: [cacheExchange, fetchExchange]
});

We'll also add a thin wrapper around the client that does some basic error handling for us:

// src/lib/quotes-api.js

async function graphqlClientWrapper(method, gqlQuery, queryVariables = {}) {
const queryResult = await graphqlClient[method](
gqlQuery,
queryVariables
).toPromise();

if (queryResult.error) {
console.error("GraphQL error:", queryResult.error);
}

return {
data: queryResult.data,
error: queryResult.error,
};
}

export const quotesApi = {
async query(gqlQuery, queryVariables = {}) {
return await graphqlClientWrapper("query", gqlQuery, queryVariables);
},
async mutation(gqlQuery, queryVariables = {}) {
return await graphqlClientWrapper("mutation", gqlQuery, queryVariables);
}
}

And lastly, we'll export gql from the @urql/core package, to make it simpler for us to write GraphQL queries in our pages:

// src/lib/quotes-api.js

export { gql } from "@urql/core";

Stop the Astro dev server and then start it again so it picks up the .env file:

npm run dev

Display all quotes

Let's display all the movie quotes in src/pages/index.astro.

First, we'll update the component script at the top and add in a query to our GraphQL API for quotes:

---
import Layout from '../layouts/Layout.astro';
import { quotesApi, gql } from '../lib/quotes-api';

const { data } = await quotesApi.query(gql`
query {
quotes {
id
quote
saidBy
createdAt
movie {
id
name
}
}
}
`);

const quotes = data?.quotes || [];
---

Then we'll update the component template to display the quotes:

<Layout title="All quotes" page="listing">
<main>
{quotes.length > 0 ? quotes.map((quote) => (
<div>
<blockquote>
<p>{quote.quote}</p>
</blockquote>
<p>
{quote.saidBy}, {quote.movie?.name}
</p>
<div>
<span>Added {new Date(Number(quote.createdAt)).toUTCString()}</span>
</div>
</div>
)) : (
<p>No movie quotes have been added.</p>
)}
</main>
</Layout>

And just like that, we have all the movie quotes displaying on the page!

Integrate Tailwind for styling

Automatically add the @astrojs/tailwind integration:

npx astro add tailwind --yes

Add the Tailwind CSS Typography and Forms plugins:

npm install --save-dev @tailwindcss/typography @tailwindcss/forms

Import the plugins in our Tailwind configuration file:

// tailwind.config.cjs

/** @type {import('tailwindcss').Config} */
module.exports = {
content: ['./src/**/*.{astro,html,js,jsx,md,mdx,svelte,ts,tsx,vue}'],
theme: {
extend: {}
},
plugins: [
require('@tailwindcss/forms'),
require('@tailwindcss/typography')
]
}

Stop the Astro dev server and then start it again so it picks up all the configuration changes:

npm run dev

Style the listing page

To style our listing page, let's add CSS classes to the component template in src/layouts/Layout.astro:

---
export interface Props {
title: string;
page?: string;
}

const { title, page } = Astro.props;

const navActiveClasses = "font-bold bg-yellow-400 no-underline";
---

<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8" />
<meta name="viewport" content="width=device-width" />
<title>{title}</title>
</head>
<body class="py-8">
<header class="prose mx-auto mb-6">
<h1>🎬 Movie Quotes</h1>
</header>
<nav class="prose mx-auto mb-6 border-y border-gray-200 flex">
<a href="/" class={`p-3 ${page === "listing" && navActiveClasses}`}>All quotes</a>
</nav>
<section class="prose mx-auto">
<slot />
</section>
</body>
</html>

Then let's add CSS classes to the component template in src/pages/index.astro:

<Layout title="All quotes">
<main>
{quotes.length > 0 ? quotes.map((quote) => (
<div class="border-b mb-6">
<blockquote class="text-2xl mb-0">
<p class="mb-4">{quote.quote}</p>
</blockquote>
<p class="text-xl mt-0 mb-8 text-gray-400">
{quote.saidBy}, {quote.movie?.name}
</p>
<div class="flex flex-col mb-6 text-gray-400">
<span class="text-gray-400 italic">Added {new Date(Number(quote.createdAt)).toUTCString()}</span>
</div>
</div>
)) : (
<p>No movie quotes have been added.</p>
)}
</main>
</Layout>

Our listing page is now looking much more user friendly!

Create an add quote page

We're going to create a form component that we can use for adding and editing quotes.

First let's create a new component file, src/components/QuoteForm.astro:

---
export interface QuoteFormData {
id?: number;
quote?: string;
saidBy?: string;
movie?: string;
}

export interface Props {
action: string;
values?: QuoteFormData;
saveError?: boolean;
loadError?: boolean;
submitLabel: string;
}

const { action, values = {}, saveError, loadError, submitLabel } = Astro.props;
---

{saveError && <p class="text-lg bg-red-200 p-4">There was an error saving the quote. Please try again.</p>}
{loadError && <p class="text-lg bg-red-200 p-4">There was an error loading the quote. Please try again.</p>}

<form method="post" action={action} class="grid grid-cols-1 gap-6">
<label for="quote" class="block">
<span>Quote</span>
<textarea id="quote" name="quote" required="required" class="mt-1 w-full">{values.quote}</textarea>
</label>
<label for="said-by" class="block">
<span>Said by</span>
<input type="text" id="said-by" name="saidBy" required="required" value={values.saidBy} class="mt-1 w-full">
</label>
<label for="movie" class="block">
<span>Movie</span>
<input type="text" id="movie" name="movie" required="required" autocomplete="off" value={values.movie} class="form-input mt-1 w-full">
</label>
<input type="submit" value={submitLabel} disabled={loadError && "disabled"} class="bg-yellow-400 hover:bg-yellow-500 text-gray-900 round p-3" />
</form>

Create a new page file, src/pages/add.astro:

---
import Layout from '../layouts/Layout.astro';
import QuoteForm from '../components/QuoteForm.astro';
import type { QuoteFormData } from '../components/QuoteForm.astro';

let formData: QuoteFormData = {};
let saveError = false;
---

<Layout title="Add a movie quote" page="add">
<main>
<h2>Add a quote</h2>
<QuoteForm action="/add" values={formData} saveError={saveError} submitLabel="Add quote" />
</main>
</Layout>

And now let's add a link to this page in the layout navigation in src/layouts/Layout.astro:

<nav class="prose mx-auto mb-6 border-y border-gray-200 flex">
<a href="/" class={`p-3 ${page === "listing" && navActiveClasses}`}>All quotes</a>
<a href="/add" class={`p-3 ${page === "add" && navActiveClasses}`}>Add a quote</a>
</nav>

Send form data to the API

When a user submits the add quote form we want to send the form data to our API so it can then save it to our database. Let's wire that up now.

First we're going to create a new file, src/lib/request-utils.js:

export function isPostRequest (request) {
return request.method === 'POST'
}

export async function getFormData (request) {
const formData = await request.formData()

return Object.fromEntries(formData.entries())
}

Then let's update the component script in src/pages/add.astro to use these new request utility functions:

---
import Layout from '../layouts/Layout.astro';
import QuoteForm from '../components/QuoteForm.astro';
import type { QuoteFormData } from '../components/QuoteForm.astro';

import { isPostRequest, getFormData } from '../lib/request-utils';

let formData: QuoteFormData = {};
let saveError = false;

if (isPostRequest(Astro.request)) {
formData = await getFormData(Astro.request);
}
---

When we create a new quote entity record via our API, we need to include a movieId field that references a movie entity record. This means that when a user submits the add quote form we need to:

  • Check if a movie entity record already exists with that movie name
  • Return the movie id if it does exist
  • If it doesn't exist, create a new movie entity record and return the movie ID

Let's update the import statement at the top of src/lib/quotes-api.js

-import { createClient } from '@urql/core'
+import { createClient, gql } from '@urql/core'

And then add a new method that will return a movie ID for us:

async function getMovieId (movieName) {
movieName = movieName.trim()

let movieId = null

// Check if a movie already exists with the provided name.
const queryMoviesResult = await quotesApi.query(
gql`
query ($movieName: String!) {
movies(where: { name: { eq: $movieName } }) {
id
}
}
`,
{ movieName }
)

if (queryMoviesResult.error) {
return null
}

const movieExists = queryMoviesResult.data?.movies.length === 1
if (movieExists) {
movieId = queryMoviesResult.data.movies[0].id
} else {
// Create a new movie entity record.
const saveMovieResult = await quotesApi.mutation(
gql`
mutation ($movieName: String!) {
saveMovie(input: { name: $movieName }) {
id
}
}
`,
{ movieName }
)

if (saveMovieResult.error) {
return null
}

movieId = saveMovieResult.data?.saveMovie.id
}

return movieId
}

And let's export it too:

export const quotesApi = {
async query (gqlQuery, queryVariables = {}) {
return await graphqlClientWrapper('query', gqlQuery, queryVariables)
},
async mutation (gqlQuery, queryVariables = {}) {
return await graphqlClientWrapper('mutation', gqlQuery, queryVariables)
},
getMovieId
}

Now we can wire up the last parts in the src/pages/add.astro component script:

---
import Layout from '../layouts/Layout.astro';
import QuoteForm from '../components/QuoteForm.astro';
import type { QuoteFormData } from '../components/QuoteForm.astro';

import { quotesApi, gql } from '../lib/quotes-api';
import { isPostRequest, getFormData } from '../lib/request-utils';

let formData: QuoteFormData = {};
let saveError = false;

if (isPostRequest(Astro.request)) {
formData = await getFormData(Astro.request);

const movieId = await quotesApi.getMovieId(formData.movie);

if (movieId) {
const quote = {
quote: formData.quote,
saidBy: formData.saidBy,
movieId,
};

const { error } = await quotesApi.mutation(gql`
mutation($quote: QuoteInput!) {
saveQuote(input: $quote) {
id
}
}
`, { quote });

if (!error) {
return Astro.redirect('/');
} else {
saveError = true;
}
} else {
saveError = true;
}
}

Add autosuggest for movies

We can create a better experience for our users by autosuggesting the movie name when they're adding a new quote.

Let's open up src/components/QuoteForm.astro and import our API helper methods in the component script:

import { quotesApi, gql } from '../lib/quotes-api.js';

Then let's add in a query to our GraphQL API for all movies:

const { data } = await quotesApi.query(gql`
query {
movies {
name
}
}
`);

const movies = data?.movies || [];

Now let's update the Movie field in the component template to use the array of movies that we've retrieved from the API:

<label for="movie" class="block">
<span>Movie</span>
<input list="movies" id="movie" name="movie" required="required" autocomplete="off" value={values.movie} class="form-input mt-1 w-full">
<datalist id="movies">
{movies.map(({ name }) => (
<option>{name}</option>
))}
</datalist>
</label>

Create an edit quote page

Let's create a new directory, src/pages/edit/:

mkdir src/pages/edit/

And inside of it, let's create a new page, [id].astro:

---
import Layout from '../../layouts/Layout.astro';
import QuoteForm, { QuoteFormData } from '../../components/QuoteForm.astro';

const id = Number(Astro.params.id);

let formValues: QuoteFormData = {};
let loadError = false;
let saveError = false;
---

<Layout title="Edit movie quote">
<main>
<h2>Edit quote</h2>
<QuoteForm action={`/edit/${id}`} values={formValues} saveError={saveError} loadError={loadError} submitLabel="Update quote" />
</main>
</Layout>

You'll see that we're using the same QuoteForm component that our add quote page uses. Now we're going to wire up our edit page so that it can load an existing quote from our API and save changes back to the API when the form is submitted.

In the [id].astro component script, let's add some code to take care of these tasks:

---
import Layout from '../../layouts/Layout.astro';
import QuoteForm, { QuoteFormData } from '../../components/QuoteForm.astro';

import { quotesApi, gql } from '../../lib/quotes-api';
import { isPostRequest, getFormData } from '../../lib/request-utils';

const id = Number(Astro.params.id);

let formValues: QuoteFormData = {};
let loadError = false;
let saveError = false;

if (isPostRequest(Astro.request)) {
const formData = await getFormData(Astro.request);
formValues = formData;

const movieId = await quotesApi.getMovieId(formData.movie);

if (movieId) {
const quote = {
id,
quote: formData.quote,
saidBy: formData.saidBy,
movieId,
};

const { error } = await quotesApi.mutation(gql`
mutation($quote: QuoteInput!) {
saveQuote(input: $quote) {
id
}
}
`, { quote });

if (!error) {
return Astro.redirect('/');
} else {
saveError = true;
}
} else {
saveError = true;
}
} else {
const { data } = await quotesApi.query(gql`
query($id: ID!) {
getQuoteById(id: $id) {
id
quote
saidBy
movie {
id
name
}
}
}
`, { id });

if (data?.getQuoteById) {
formValues = {
...data.getQuoteById,
movie: data.getQuoteById.movie.name
};
} else {
loadError = true;
}
}
---

Load up http://localhost:3000/edit/1 in your browser to test out the edit quote page.

Now we're going to add edit links to the quotes listing page. Let's start by creating a new component src/components/QuoteActionEdit.astro:

---
export interface Props {
id: number;
}

const { id } = Astro.props;
---
<a href={`/edit/${id}`} class="flex items-center mr-5 text-gray-400 hover:text-yellow-600 underline decoration-yellow-600 decoration-2 underline-offset-4">
<svg class="w-6 h-6 mr-1" xmlns="http://www.w3.org/2000/svg" viewBox="0 0 24 24" fill="currentColor" class="w-6 h-6">
<path d="M21.731 2.269a2.625 2.625 0 00-3.712 0l-1.157 1.157 3.712 3.712 1.157-1.157a2.625 2.625 0 000-3.712zM19.513 8.199l-3.712-3.712-8.4 8.4a5.25 5.25 0 00-1.32 2.214l-.8 2.685a.75.75 0 00.933.933l2.685-.8a5.25 5.25 0 002.214-1.32l8.4-8.4z" />
<path d="M5.25 5.25a3 3 0 00-3 3v10.5a3 3 0 003 3h10.5a3 3 0 003-3V13.5a.75.75 0 00-1.5 0v5.25a1.5 1.5 0 01-1.5 1.5H5.25a1.5 1.5 0 01-1.5-1.5V8.25a1.5 1.5 0 011.5-1.5h5.25a.75.75 0 000-1.5H5.25z" />
</svg>
<span class="hover:underline hover:decoration-yellow-600">Edit</span>
</a>

Then let's import this component and use it in our listing page, src/pages/index.astro:

---
import Layout from '../layouts/Layout.astro';
import QuoteActionEdit from '../components/QuoteActionEdit.astro';
import { quotesApi, gql } from '../lib/quotes-api';

// ...
---

<Layout title="All quotes" page="listing">
<main>
{quotes.length > 0 ? quotes.map((quote) => (
<div class="border-b mb-6">
...
<div class="flex flex-col mb-6 text-gray-400">
<span class="flex items-center">
<QuoteActionEdit id={quote.id} />
</span>
<span class="mt-4 text-gray-400 italic">Added {new Date(Number(quote.createdAt)).toUTCString()}</span>
</div>
</div>
)) : (
<p>No movie quotes have been added.</p>
)}
</main>
</Layout>

Add delete quote functionality

Our Movie Quotes app can create, retrieve and update quotes. Now we're going to implement the D in CRUD — delete!

First let's create a new component, src/components/QuoteActionDelete.astro:

---
export interface Props {
id: number;
}

const { id } = Astro.props;
---
<form method="POST" action={`/delete/${id}`} class="form-delete-quote m-0">
<button type="submit" class="flex items-center text-gray-400 hover:text-red-700 underline decoration-red-700 decoration-2 underline-offset-4">
<svg class="w-6 h-6 mr-1" xmlns="http://www.w3.org/2000/svg" viewBox="0 0 24 24" fill="currentColor" class="w-6 h-6">
<path fill-rule="evenodd" d="M12 2.25c-5.385 0-9.75 4.365-9.75 9.75s4.365 9.75 9.75 9.75 9.75-4.365 9.75-9.75S17.385 2.25 12 2.25zm-1.72 6.97a.75.75 0 10-1.06 1.06L10.94 12l-1.72 1.72a.75.75 0 101.06 1.06L12 13.06l1.72 1.72a.75.75 0 101.06-1.06L13.06 12l1.72-1.72a.75.75 0 10-1.06-1.06L12 10.94l-1.72-1.72z" clip-rule="evenodd" />
</svg>
<span>Delete</span>
</button>
</form>

And then we'll drop it into our listing page, src/pages/index.astro:

---
import Layout from '../layouts/Layout.astro';
import QuoteActionEdit from '../components/QuoteActionEdit.astro';
import QuoteActionDelete from '../components/QuoteActionDelete.astro';
import { quotesApi, gql } from '../lib/quotes-api';

// ...
---

<Layout title="All quotes" page="listing">
<main>
{quotes.length > 0 ? quotes.map((quote) => (
<div class="border-b mb-6">
...
<div class="flex flex-col mb-6 text-gray-400">
<span class="flex items-center">
<QuoteActionEdit id={quote.id} />
<QuoteActionDelete id={quote.id} />
</span>
<span class="mt-4 text-gray-400 italic">Added {new Date(Number(quote.createdAt)).toUTCString()}</span>
</div>
</div>
...

At the moment when a delete form is submitted from our listing page, we get an Astro 404 page. Let's fix this by creating a new directory, src/pages/delete/:

mkdir src/pages/delete/

And inside of it, let's create a new page, [id].astro:

---
import Layout from '../../layouts/Layout.astro';

import { quotesApi, gql } from '../../lib/quotes-api';
import { isPostRequest } from '../../lib/request-utils';

if (isPostRequest(Astro.request)) {
const id = Number(Astro.params.id);

const { error } = await quotesApi.mutation(gql`
mutation($id: ID!) {
deleteQuotes(where: { id: { eq: $id }}) {
id
}
}
`, { id });

if (!error) {
return Astro.redirect('/');
}
}
---
<Layout title="Delete movie quote">
<main>
<h2>Delete quote</h2>
<p class="text-lg bg-red-200 p-4">There was an error deleting the quote. Please try again.</p>
</main>
</Layout>

Now if we click on a delete quote button on our listings page, it should call our GraphQL API to delete the quote. To make this a little more user friendly, let's add in a confirmation dialog so that users don't delete a quote by accident.

Let's create a new directory, src/scripts/:

mkdir src/scripts/

And inside of that directory let's create a new file, quote-actions.js:

// src/scripts/quote-actions.js

export function confirmDeleteQuote (form) {
if (confirm('Are you sure want to delete this quote?')) {
form.submit()
}
}

Then we can pull it in as client side JavaScript on our listing page, src/pages/index.astro:

<Layout>
...
</Layout>

<script>
import { confirmDeleteQuote } from '../scripts/quote-actions.js'

addEventListener('DOMContentLoaded', () => {
document.querySelectorAll('.form-delete-quote').forEach((deleteForm) => {
deleteForm.addEventListener('submit', (event) => {
event.preventDefault()
confirmDeleteQuote(event.currentTarget)
})
})
})
</script>

Build a "like" quote feature

We've built all the basic CRUD (Create, Retrieve, Update & Delete) features into our application. Now let's build a feature so that users can interact and "like" their favourite movie quotes.

To build this feature we're going to add custom functionality to our API and then add a new component, along with some client side JavaScript, to our frontend.

Create an API migration

We're now going to work on the code for the API, under the apps/movie-quotes-api directory.

First let's create a migration that adds a likes column to our quotes database table. We'll create a new migration file, migrations/003.do.sql:

ALTER TABLE quotes ADD COLUMN likes INTEGER default 0;

This migration will automatically be applied when we next start our Platformatic API.

Create an API plugin

To add custom functionality to our Platformatic API, we need to create a Fastify plugin and update our API configuration to use it.

Let's create a new file, plugin.js, and inside it we'll add the skeleton structure for our plugin:

// plugin.js

'use strict'

module.exports = async function plugin (app) {
app.log.info('plugin loaded')
}

Now let's register our plugin in our API configuration file, platformatic.db.json:

{
...
"migrations": {
"dir": "./migrations"
},
"plugins": {
"paths": ["./plugin.js"]
}
}

And then we'll start up our Platformatic API:

npm run dev

We should see log messages that tell us that our new migration has been applied and our plugin has been loaded:

[10:09:20.052] INFO (146270): running 003.do.sql
[10:09:20.129] INFO (146270): plugin loaded
[10:09:20.209] INFO (146270): server listening
url: "http://127.0.0.1:3042"

Now it's time to start adding some custom functionality inside our plugin.

Add a REST API route

We're going to add a REST route to our API that increments the count of likes for a specific quote: /quotes/:id/like

First let's add fluent-json-schema as a dependency for our API:

npm install fluent-json-schema

We'll use fluent-json-schema to help us generate a JSON Schema. We can then use this schema to validate the request path parameters for our route (id).

tip

You can use fastify-type-provider-typebox or typebox if you want to convert your JSON Schema into a TypeScript type. See this GitHub thread for more background, and look at the example below to see it in practice.

Here you can see in practice how to leverage typebox combined with fastify-type-provider-typebox:

import { FastifyInstance } from "fastify";
import { Static, Type } from "@sinclair/typebox";
import { TypeBoxTypeProvider } from "@fastify/type-provider-typebox";

/**
* Creation of the JSON schema needed to validate the params passed to the route
*/
const schemaParams = Type.Object({
num1: Type.Number(),
num2: Type.Number(),
});

/**
* We convert the JSON schema to the TypeScript type, in this case:
* {
num1: number;
num2: number;
}
*/
type Params = Static<typeof schemaParams>;

/**
* Here we can pass the type previously created to our synchronous unit function
*/
const multiplication = ({ num1, num2 }: Params) => num1 * num2;

export default async function (app: FastifyInstance) {
app.withTypeProvider<TypeBoxTypeProvider>().get(
"/multiplication/:num1/:num2",
{ schema: { params: schemaParams } },
/**
* Since we leverage `withTypeProvider<TypeBoxTypeProvider>()`,
* we no longer need to explicitly define the `params`.
* They will be automatically inferred as:
* {
num1: number;
num2: number;
}
*/
({ params }) => multiplication(params)
);
}

Now let's add our REST API route in plugin.js:

'use strict'

const S = require('fluent-json-schema')

module.exports = async function plugin (app) {
app.log.info('plugin loaded')

// This JSON Schema will validate the request path parameters.
// It reuses part of the schema that Platformatic DB has
// automatically generated for our Quote entity.
const schema = {
params: S.object().prop('id', app.getSchema('Quote').properties.id)
}

app.post('/quotes/:id/like', { schema }, async function (request, response) {
return {}
})
}

We can now make a POST request to our new API route:

curl --request POST http://localhost:3042/quotes/1/like
info

Learn more about how validation works in the Fastify validation documentation.

Our API route is currently returning an empty object ({}). Let's wire things up so that it increments the number of likes for the quote with the specified ID. To do this we'll add a new function inside of our plugin:

module.exports = async function plugin (app) {
app.log.info('plugin loaded')

async function incrementQuoteLikes (id) {
const { db, sql } = app.platformatic

const result = await db.query(sql`
UPDATE quotes SET likes = likes + 1 WHERE id=${id} RETURNING likes
`)

return result[0]?.likes
}

// ...
}

And then we'll call that function in our route handler function:

app.post('/quotes/:id/like', { schema }, async function (request, response) {
return { likes: await incrementQuoteLikes(request.params.id) }
})

Now when we make a POST request to our API route:

curl --request POST http://localhost:3042/quotes/1/like

We should see that the likes value for the quote is incremented every time we make a request to the route.

{"likes":1}

Add a GraphQL API mutation

We can add a likeQuote mutation to our GraphQL API by reusing the incrementQuoteLikes function that we just created.

Let's add this code at the end of our plugin, inside plugin.js:

module.exports = async function plugin (app) {
// ...

app.graphql.extendSchema(`
extend type Mutation {
likeQuote(id: ID!): Int
}
`)

app.graphql.defineResolvers({
Mutation: {
likeQuote: async (_, { id }) => await incrementQuoteLikes(id)
}
})
}

The code we've just added extends our API's GraphQL schema and defines a corresponding resolver for the likeQuote mutation.

We can now load up GraphiQL in our web browser and try out our new likeQuote mutation with this GraphQL query:

mutation {
likeQuote(id: 1)
}
info

Learn more about how to extend the GraphQL schema and define resolvers in the Mercurius API documentation.

Enable CORS on the API

When we build "like" functionality into our frontend, we'll be making a client side HTTP request to our GraphQL API. Our backend API and our frontend are running on different origins, so we need to configure our API to allow requests from the frontend. This is known as Cross-Origin Resource Sharing (CORS).

To enable CORS on our API, let's open up our API's .env file and add in a new setting:

PLT_SERVER_CORS_ORIGIN=http://localhost:3000

The value of PLT_SERVER_CORS_ORIGIN is our frontend application's origin.

Now we can add a cors configuration object in our API's configuration file, platformatic.db.json:

{
"server": {
"logger": {
"level": "{PLT_SERVER_LOGGER_LEVEL}"
},
"hostname": "{PLT_SERVER_HOSTNAME}",
"port": "{PORT}",
"cors": {
"origin": "{PLT_SERVER_CORS_ORIGIN}"
}
},
...
}

The HTTP responses from all endpoints on our API will now include the header:

access-control-allow-origin: http://localhost:3000

This will allow JavaScript running on web pages under the http://localhost:3000 origin to make requests to our API.

Add like quote functionality

Now that our API supports "liking" a quote, let's integrate it as a feature in our frontend.

First we'll create a new component, src/components/QuoteActionLike.astro:

---
export interface Props {
id: number;
likes: number;
}

const { id, likes } = Astro.props;
---
<span data-quote-id={id} class="like-quote cursor-pointer mr-5 flex items-center">
<svg class="like-icon w-6 h-6 mr-2 text-red-600" xmlns="http://www.w3.org/2000/svg" fill="none" viewBox="0 0 24 24" stroke-width="1.5" stroke="currentColor" class="w-6 h-6">
<path stroke-linecap="round" stroke-linejoin="round" d="M21 8.25c0-2.485-2.099-4.5-4.688-4.5-1.935 0-3.597 1.126-4.312 2.733-.715-1.607-2.377-2.733-4.313-2.733C5.1 3.75 3 5.765 3 8.25c0 7.22 9 12 9 12s9-4.78 9-12z" />
</svg>
<span class="likes-count w-8">{likes}</span>
</span>

<style>
.like-quote:hover .like-icon,
.like-quote.liked .like-icon {
fill: currentColor;
}
</style>

And in our listing page, src/pages/index.astro, let's import our new component and add it into the interface:

---
import Layout from '../layouts/Layout.astro';
import QuoteActionEdit from '../components/QuoteActionEdit.astro';
import QuoteActionDelete from '../components/QuoteActionDelete.astro';
import QuoteActionLike from '../components/QuoteActionLike.astro';
import { quotesApi, gql } from '../lib/quotes-api';

// ...
---

<Layout title="All quotes" page="listing">
<main>
{quotes.length > 0 ? quotes.map((quote) => (
<div class="border-b mb-6">
...
<div class="flex flex-col mb-6 text-gray-400">
<span class="flex items-center">
<QuoteActionLike id={quote.id} likes={quote.likes} />
<QuoteActionEdit id={quote.id} />
<QuoteActionDelete id={quote.id} />
</span>
<span class="mt-4 text-gray-400 italic">Added {new Date(Number(quote.createdAt)).toUTCString()}</span>
</div>
</div>
...

Then let's update the GraphQL query in this component's script to retrieve the +likes field for all quotes:

const { data } = await quotesApi.query(gql`
query {
quotes {
id
quote
saidBy
likes
createdAt
movie {
id
name
}
}
}
`);

Now we have the likes showing for each quote, let's wire things up so that +clicking on the like component for a quote will call our API and add a like.

Let's open up src/scripts/quote-actions.js and add a new function that +makes a request to our GraphQL API:

import { quotesApi, gql } from '../lib/quotes-api.js'

export function confirmDeleteQuote (form) {
if (confirm('Are you sure want to delete this quote?')) {
form.submit()
}
}

export async function likeQuote (likeQuote) {
likeQuote.classList.add('liked')
likeQuote.classList.remove('cursor-pointer')

const id = Number(likeQuote.dataset.quoteId)

const { data } = await quotesApi.mutation(gql`
mutation($id: ID!) {
likeQuote(id: $id)
}
`, { id })

if (data?.likeQuote) {
likeQuote.querySelector('.likes-count').innerText = data.likeQuote
}
}

And then let's attach the likeQuote function to the click event for each +like quote component on our listing page. We can do this by adding a little +extra code inside the <script> block in src/pages/index.astro:

<script>
import { confirmDeleteQuote, likeQuote } from '../scripts/quote-actions.js'

addEventListener('DOMContentLoaded', () => {
document.querySelectorAll('.form-delete-quote').forEach((deleteForm) => {
deleteForm.addEventListener('submit', (event) => {
event.preventDefault()
confirmDeleteQuote(event.currentTarget)
})
})

document.querySelectorAll('.like-quote').forEach((container) => {
container.addEventListener('click', (event) => likeQuote(event.currentTarget), { once: true })
})
})
</script>

Sort the listing by top quotes

Now that users can like their favourite quotes, as a final step, we'll allow +for sorting quotes on the listing page by the number of likes they have.

Let's update src/pages/index.astro to read a sort query string parameter and use it in the GraphQL query that we make to our API:

---
// ...

const allowedSortFields = ["createdAt", "likes"];
const searchParamSort = new URL(Astro.request.url).searchParams.get("sort");
const sort = allowedSortFields.includes(searchParamSort) ? searchParamSort : "createdAt";

const { data } = await quotesApi.query(gql`
query {
quotes(orderBy: {field: ${sort}, direction: DESC}) {
id
quote
saidBy
likes
createdAt
movie {
id
name
}
}
}
`);

const quotes = data?.quotes || [];
---
<Layout title="All quotes" page={`listing-${sort}`}>
...

Then let's replace the 'All quotes' link in the <nav> in src/layouts/Layout.astro +with two new links:

<nav class="prose mx-auto mb-6 border-y border-gray-200 flex">
<a href="/?sort=createdAt" class={`p-3 ${page === "listing-createdAt" && navActiveClasses}`}>Latest quotes</a>
<a href="/?sort=likes" class={`p-3 ${page === "listing-likes" && navActiveClasses}`}>Top quotes</a>
<a href="/add" class={`p-3 ${page === "add" && navActiveClasses}`}>Add a quote</a>
</nav>

With these few extra lines of code, our users can now sort quotes by when they +were created or by the number of likes that they have. Neat!

Wrapping up

And we're done — you now have the knowledge you need to build a full stack +application on top of Platformatic DB.

We can't wait to see what you'll build next!

+ + + + \ No newline at end of file diff --git a/docs/0.41.1/getting-started/new-api-project-instructions/index.html b/docs/0.41.1/getting-started/new-api-project-instructions/index.html new file mode 100644 index 00000000000..bb09b375322 --- /dev/null +++ b/docs/0.41.1/getting-started/new-api-project-instructions/index.html @@ -0,0 +1,20 @@ + + + + + +new-api-project-instructions | Platformatic Open Source Software + + + + + +
+
Version: 0.41.1

new-api-project-instructions

Run this command in your terminal to start the Platformatic creator wizard:

npm create platformatic@latest

This interactive command-line tool will ask you some questions about how you'd +like to set up your new Platformatic project. For this guide, select these options:

- Which kind of project do you want to create?  => DB
- Where would you like to create your project? => quick-start
- Do you want to create default migrations? => Yes
- Do you want to create a plugin? => Yes
- Do you want to use TypeScript? => No
- Do you want to install dependencies? => Yes (this can take a while)
- Do you want to apply the migrations? => Yes
- Do you want to generate types? => Yes
- Do you want to create the github action to deploy this application to Platformatic Cloud dynamic workspace? => No
- Do you want to create the github action to deploy this application to Platformatic Cloud static workspace? => No

Once the wizard is complete, you'll have a Platformatic app project in the +folder quick-start, with example migration files and a plugin script.

info

Make sure you run the npm/yarn/pnpm install command manually if you don't ask the wizard to do it for you.

+ + + + \ No newline at end of file diff --git a/docs/0.41.1/getting-started/quick-start-guide/index.html b/docs/0.41.1/getting-started/quick-start-guide/index.html new file mode 100644 index 00000000000..5dc19a631b2 --- /dev/null +++ b/docs/0.41.1/getting-started/quick-start-guide/index.html @@ -0,0 +1,38 @@ + + + + + +Quick Start Guide | Platformatic Open Source Software + + + + + +
+
Version: 0.41.1

Quick Start Guide

In this guide you'll learn how to create and run your first API with +Platformatic DB. Let's get started!

info

This guide uses SQLite for the database, but +Platformatic DB also supports PostgreSQL, +MySQL and MariaDB databases.

Prerequisites

Platformatic supports macOS, Linux and Windows (WSL recommended).

To follow along with this guide you'll need to have these things installed:

Create a new API project

Automatic CLI

Run this command in your terminal to start the Platformatic creator wizard:

npm create platformatic@latest

This interactive command-line tool will ask you some questions about how you'd +like to set up your new Platformatic project. For this guide, select these options:

- Which kind of project do you want to create?  => DB
- Where would you like to create your project? => quick-start
- Do you want to create default migrations? => Yes
- Do you want to create a plugin? => Yes
- Do you want to use TypeScript? => No
- Do you want to install dependencies? => Yes (this can take a while)
- Do you want to apply the migrations? => Yes
- Do you want to generate types? => Yes
- Do you want to create the github action to deploy this application to Platformatic Cloud dynamic workspace? => No
- Do you want to create the github action to deploy this application to Platformatic Cloud static workspace? => No

Once the wizard is complete, you'll have a Platformatic app project in the +folder quick-start, with example migration files and a plugin script.

info

Make sure you run the npm/yarn/pnpm install command manually if you don't ask the wizard to do it for you.

Start your API server

In your project directory, run this command to start your API server:

npm start

Your Platformatic API is now up and running! 🌟

This command will:

  • Automatically map your SQL database to REST and GraphQL API interfaces.
  • Start the Platformatic API server.

You can jump down to Next steps or read on to learn more about +the project files that the wizard has created for you.

Check the database schema

In your project directory (quick-start), open the migrations directory, which stores your database migration files. It contains both the 001.do.sql and 001.undo.sql files. The 001.do.sql file contains the SQL statements to create the database objects, while the 001.undo.sql file contains the SQL statements to drop them.

migrations/001.do.sql
CREATE TABLE IF NOT EXISTS movies (
id INTEGER PRIMARY KEY,
title TEXT NOT NULL
);

Note that this migration has been already applied by Platformatic creator.

Check your API configuration

In your project directory, check the Platformatic configuration file named +platformatic.db.json and the environment file named .env:

The created configuration tells Platformatic to:

  • Run an API server on http://127.0.0.1:3042/
  • Connect to an SQLite database stored in a file named db.sqlite
  • Look for database migration files in the migrations directory
  • Load the plugin file named plugin.js and automatically generate types
tip

The Configuration reference explains all of the +supported configuration options.

Manual setup

Create a directory for your new API project:

mkdir quick-start

cd quick-start

Then create a package.json file and install the platformatic +CLI as a project dependency:

npm init --yes

npm install platformatic

Add a database schema

In your project directory (quick-start), create a file for your sqlite3 database and also, a migrations directory to +store your database migration files:

touch db.sqlite

mkdir migrations

Then create a new migration file named 001.do.sql in the migrations +directory.

Copy and paste this SQL query into the migration file:

migrations/001.do.sql
CREATE TABLE movies (
id INTEGER PRIMARY KEY,
title VARCHAR(255) NOT NULL
);

When it's run by Platformatic, this query will create a new database table +named movies.

tip

You can check syntax for SQL queries on the Database.Guide SQL Reference.

Configure your API

In your project directory, create a new Platformatic configuration file named +platformatic.db.json.

Copy and paste in this configuration:

platformatic.db.json
{
"server": {
"hostname": "127.0.0.1",
"port": "3042"
},
"db": {
"connectionString": "sqlite://./db.sqlite"
},
"migrations": {
"dir": "./migrations",
"autoApply": "true"
}
}

This configuration tells Platformatic to:

  • Run an API server on http://127.0.0.1:3042/
  • Connect to an SQLite database stored in a file named db.sqlite
  • Look for, and apply the database migrations specified in the migrations directory
tip

The Configuration reference explains all of the +supported configuration options.

Start your API server

In your project directory, use the Platformatic CLI to start your API server:

npx platformatic db start

This will:

  • Automatically map your SQL database to REST and GraphQL API interfaces.
  • Start the Platformatic API server.

Your Platformatic API is now up and running! 🌟

Next steps

Use the REST API interface

You can use cURL to make requests to the REST interface of your API, for example:

Create a new movie

curl -X POST -H "Content-Type: application/json" \
-d "{ \"title\": \"Hello Platformatic DB\" }" \
http://localhost:3042/movies

You should receive a response from your API like this:

{"id":1,"title":"Hello Platformatic DB"}

Get all movies

curl http://localhost:3042/movies

You should receive a response from your API like this, with an array +containing all the movies in your database:

[{"id":1,"title":"Hello Platformatic DB"}]
tip

If you would like to know more about what routes are automatically available, +take a look at the REST API reference +for an overview of the REST interface that the generated API provides.

Swagger OpenAPI documentation

You can explore the OpenAPI documentation for your REST API in the Swagger UI at +http://localhost:3042/documentation

Use the GraphQL API interface

Open http://localhost:3042/graphiql in your +web browser to explore the GraphQL interface of your API.

Try out this GraphQL query to retrieve all movies from your API:

query {
movies {
id
title
}
}
tip

Learn more about your API's GraphQL interface in the +GraphQL API reference.

+ + + + \ No newline at end of file diff --git a/docs/0.41.1/guides/add-custom-functionality/extend-graphql/index.html b/docs/0.41.1/guides/add-custom-functionality/extend-graphql/index.html new file mode 100644 index 00000000000..37728586367 --- /dev/null +++ b/docs/0.41.1/guides/add-custom-functionality/extend-graphql/index.html @@ -0,0 +1,18 @@ + + + + + +Extend GraphQL Schema | Platformatic Open Source Software + + + + + +
+
Version: 0.41.1

Extend GraphQL Schema

Sum Function

Copy and paste this code into ./sample-plugin.js file

'use strict'
module.exports = async(app, opts) => {
app.graphql.extendSchema(`
extend type Query {
add(x: Int, y: Int): Int
}
`)
app.graphql.defineResolvers({
Query: {
add: async (_, { x, y }) => x + y
}
})
}

This will add a new GraphQL query called add which will simply add the two inputs x and y provided.

You don't need to reload the server, since it will watch this file and hot-reload itself. +Let's query the server with the following body


query{
add(x: 1, y: 2)
}

You can use curl command to run this query

$ curl --location --request POST 'http://localhost:3042/graphql' \
--header 'Content-Type: application/json' \
--data-raw '{"query":"query{\n add(x: 1, y: 2)\n}"}'

You will get this output, with the sum.

{
"data": {
"add": 3
}
}

Extend Entities API

Let's implement a getPageByTitle query

'use strict'
module.exports = async(app, opts) => {
app.graphql.extendSchema(`
extend type Query {
getPageByTitle(title: String): Page
}
`)
app.graphql.defineResolvers({
Query: {
getPageByTitle: async(_, { title }) => {
const res = await app.platformatic.entities.page.find({
where: {
title: {
eq: title
}
}
})
if (res) {
return res[0]
}
return null
}
}
})
}

Page GraphQL type is already defined by Platformatic DB on start.

We are going to run this code against this GraphQL query

query{
getPageByTitle(title: "First Page"){
id
title
}
}

You can use curl command to run this query

$ curl --location --request POST 'http://localhost:3042/graphql' \
--header 'Content-Type: application/json' \
--data-raw '{"query":"query{\n getPageByTitle(title: \"First Page\"){\n id\n title\n }\n}"}'

You will get an output similar to this

{
"data": {
"getPageByTitle": {
"id": "1",
"title": "First Page"
}
}
}
+ + + + \ No newline at end of file diff --git a/docs/0.41.1/guides/add-custom-functionality/extend-rest/index.html b/docs/0.41.1/guides/add-custom-functionality/extend-rest/index.html new file mode 100644 index 00000000000..7226bc61a5a --- /dev/null +++ b/docs/0.41.1/guides/add-custom-functionality/extend-rest/index.html @@ -0,0 +1,17 @@ + + + + + +Extend REST API | Platformatic Open Source Software + + + + + +
+
Version: 0.41.1

Extend REST API

We will follow same examples implemented in GraphQL examples: a sum function and an API to get pages by title.

Sum Function

Copy and paste this code into ./sample-plugin.js file

'use strict'
module.exports = async(app, opts) => {
app.post('/sum', async(req, reply) => {
const { x, y } = req.body
return { sum: (x + y)}
})
}

You don't need to reload the server, since it will watch this file and hot-reload itself.

Let's make a POST /sum request to the server with the following body

{
"x": 1,
"y": 2
}

You can use curl command to run this query

$ curl --location --request POST 'http://localhost:3042/sum' \
--header 'Content-Type: application/json' \
--data-raw '{
"x": 1,
"y": 2
}'

You will get this output, with the sum.

{
"sum": 3
}

Extend Entities API

Let's implement a /page-by-title endpoint, using Entities API

'use strict'
module.exports = async(app, opts) => {
app.get('/page-by-title', async(req, reply) => {
const { title } = req.query
const res = await app.platformatic.entities.page.find({
where: {
title: {
eq: title
}
}
})
if (res) {
return res[0]
}
return null
})
}

We will make a GET /page-by-title?title=First%20Page request, and we expect a single page as output.

You can use curl command to run this query

$ curl --location --request GET 'http://localhost:3042/page-by-title?title=First Page'

You will get an output similar to this

{
"id": "1",
"title": "First Page",
"body": "This is the first sample page"
}
+ + + + \ No newline at end of file diff --git a/docs/0.41.1/guides/add-custom-functionality/introduction/index.html b/docs/0.41.1/guides/add-custom-functionality/introduction/index.html new file mode 100644 index 00000000000..fc7a89b1d7f --- /dev/null +++ b/docs/0.41.1/guides/add-custom-functionality/introduction/index.html @@ -0,0 +1,17 @@ + + + + + +Add Custom Functionality | Platformatic Open Source Software + + + + + +
+
Version: 0.41.1

Add Custom Functionality

If you want to extend Platformatic DB features, it is possible to register a plugin, which will be in the form of a standard Fastify plugin.

The config file will specify where the plugin file is located as the example below:

{
...
"plugins": {
"paths": ["./plugin/index.js"]
}
}

The path is relative to the config file path.

Since it uses fastify-isolate under the hood, all other options of that package may be specified under the plugins property.

Once the config file is set up, you can write your plugin

module.exports = async function (app) {
app.log.info('plugin loaded')
// Extend GraphQL Schema with resolvers
app.graphql.extendSchema(`
extend type Query {
add(x: Int, y: Int): Int
}
`)
app.graphql.defineResolvers({
Query: {
add: async (_, { x, y }) => x + y
}
})

// Create a new route, see https://www.fastify.io/docs/latest/Reference/Routes/ for more info
app.post('/sum', (req, reply) => {
const {x, y} = req.body
return { result: x + y }
})

// access platformatic entities data
app.get('/all-entities', (req, reply) => {
const entities = Object.keys(app.platformatic.entities)
return { entities }
})
}

+ + + + \ No newline at end of file diff --git a/docs/0.41.1/guides/add-custom-functionality/prerequisites/index.html b/docs/0.41.1/guides/add-custom-functionality/prerequisites/index.html new file mode 100644 index 00000000000..16e0e232cbb --- /dev/null +++ b/docs/0.41.1/guides/add-custom-functionality/prerequisites/index.html @@ -0,0 +1,17 @@ + + + + + +Prerequisites | Platformatic Open Source Software + + + + + +
+
Version: 0.41.1

Prerequisites

In the following examples we assume you already

  • cloned platformatic/platformatic repo from Github
  • ran pnpm install to install all dependencies
  • have Docker and docker-compose installed and running on your machine

Config File

Create a platformatic.db.json file in the root project, it will be loaded automatically by Platformatic (no need of -c, --config flag).

{
"server": {
"hostname": "127.0.0.1",
"port": 3042,
"logger": {
"level": "info"
}
},
"db": {
"connectionString": "postgres://postgres:postgres@127.0.0.1/postgres"
},
"migrations": {
"dir": "./migrations",
"table": "versions"
},
"plugins": {
"paths": ["plugin.js"]
}
}
  • Once Platformatic DB starts, its API will be available at http://127.0.0.1:3042
  • It will connect and read the schema from a PostgreSQL DB
  • Will read migrations from ./migrations directory
  • Will load custom functionality from ./plugin.js file.

Database and Migrations

Start the database using the sample docker-compose.yml file.

$ docker-compose up -d postgresql

For migrations create a ./migrations directory and a 001.do.sql file with following contents

CREATE TABLE pages (
id SERIAL PRIMARY KEY,
title VARCHAR(255) NOT NULL,
body TEXT NOT NULL
);
INSERT INTO pages (title, body) VALUES ('First Page', 'This is the first sample page');
INSERT INTO pages (title, body) VALUES ('Second Page', 'This is the second sample page');
INSERT INTO pages (title, body) VALUES ('Third Page', 'This is the third sample page');

Plugin

Copy and paste this boilerplate code into ./plugin.js file. We will fill this in the examples.

'use strict'

module.exports = async (app, opts) => {
// we will fill this later
}

Start the server

Run

$ platformatic db start

You will get an output similar to this

                           /////////////
///// /////
/// ///
/// ///
/// ///
&& /// /// &&
&&&&&& /// /// &&&&&&
&&&& /// /// &&&&
&&& /// /// &&&&&&&&&&&&
&&& /// /////// //// && &&&&&
&& /// /////////////// &&&
&&& /// /// &&&
&&& /// // &&
&&& /// &&
&&& /// &&&
&&&& /// &&&
&&&&&% /// &&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&
///
///
///
///
///
///

[11:19:46.562] INFO (65122): running 001.do.sql
[11:19:46.929] INFO (65122): server listening
url: "http://127.0.0.1:3042"

Now it is possible to create some examples, like extend GraphQL Schema, extend REST API

+ + + + \ No newline at end of file diff --git a/docs/0.41.1/guides/add-custom-functionality/raw-sql/index.html b/docs/0.41.1/guides/add-custom-functionality/raw-sql/index.html new file mode 100644 index 00000000000..af24bd31e00 --- /dev/null +++ b/docs/0.41.1/guides/add-custom-functionality/raw-sql/index.html @@ -0,0 +1,17 @@ + + + + + +Raw SQL queries | Platformatic Open Source Software + + + + + +
+
Version: 0.41.1

Raw SQL queries

To run raw SQL queries using plugins, use the app.platformatic.db.query method and pass it a SQL query using the app.platformatic.sql method.

'use strict'
module.exports = async(app, opts) => {
app.graphql.extendSchema(`
type YearlySales {
year: Int
sales: Int
}

extend type Query {
yearlySales: [YearlySales]
}
`)
app.graphql.defineResolvers({
Query: {
yearlySales: async(_, { title }) => {
const { db, sql } = app.platformatic;
const res = await db.query(sql(`
SELECT
YEAR(created_at) AS year,
SUM(amount) AS sales
FROM
orders
GROUP BY
YEAR(created_at)
`))
return res
}
}
})
}
+ + + + \ No newline at end of file diff --git a/docs/0.41.1/guides/compiling-typescript-for-deployment/index.html b/docs/0.41.1/guides/compiling-typescript-for-deployment/index.html new file mode 100644 index 00000000000..89acafd4d00 --- /dev/null +++ b/docs/0.41.1/guides/compiling-typescript-for-deployment/index.html @@ -0,0 +1,25 @@ + + + + + +Compiling Typescript for Deployment | Platformatic Open Source Software + + + + + +
+
Version: 0.41.1

Compiling Typescript for Deployment

Platformatic Service provides automatic TypeScript compilation during the startup +of your Node.js server. While this provides an amazing developer experience, in production it adds additional +start time and it requires more resources. In this guide, we show how to compile your TypeScript +source files before shipping to a server.

Setup

The following is supported by all Platformatic applications, as they are all based on the same plugin system. +If you have generated your application using npx create-platformatic@latest, you will have a similar section in your config file:

{
...
"plugins": {
"paths": [{
"path": "plugins",
"encapsulate": false
}, "routes"],
"typescript": "{PLT_TYPESCRIPT}"
}
}

Note that the {PLT_TYPESCRIPT} will be automatically replaced with the PLT_TYPESCRIPT environment variable, that is configured in your +.env (and .env.sample) file:

PLT_TYPESCRIPT=true

Older Platformatic applications might not have the same layout, if so you can update your settings to match (after updating your dependencies).

Compiling for deployment

Compiling for deployment is then as easy as running plt service compile in that same folder. Remember to set PLT_TYPESCRIPT=false in your environment variables in the deployed environments.

Usage with Runtime

If you are building a Runtime-based application, you will need +to compile every service independently or use the plt runtime compile command.

Avoid shipping TypeScript sources

If you want to avoid shipping the TypeScript sources you need to configure Platformatic with the location +where your files have been built by adding an outDir option:

{
...
"plugins": {
"paths": [{
"path": "plugins",
"encapsulate": false
}, "routes"],
"typescript": {
"enabled": "{PLT_TYPESCRIPT}",
"outDir": "dist"
}
}
}

This is not necessary if you include tsconfig.json together with the compiled code.

+ + + + \ No newline at end of file diff --git a/docs/0.41.1/guides/debug-platformatic-db/index.html b/docs/0.41.1/guides/debug-platformatic-db/index.html new file mode 100644 index 00000000000..c72831de095 --- /dev/null +++ b/docs/0.41.1/guides/debug-platformatic-db/index.html @@ -0,0 +1,17 @@ + + + + + +Debug Platformatic DB | Platformatic Open Source Software + + + + + +
+
Version: 0.41.1

Debug Platformatic DB

Error: No tables found in the database

  • Verify your database connection string is correct in your Platformatic DB configuration
    • Make sure the database name is correct
  • Ensure that you have run the migration command npx platformatic db migrations apply before starting the server. See the Platformatic DB Migrations documentation for more information on working with migrations.

Logging SQL queries

You can see all the queries that are being run against your database in your terminal by setting the logger level to trace in your platformatic.db.json config file:

platformatic.db.json
{
"server": {
"logger": {
"level": "trace"
}
}
}
+ + + + \ No newline at end of file diff --git a/docs/0.41.1/guides/deploying-on-lambda/index.html b/docs/0.41.1/guides/deploying-on-lambda/index.html new file mode 100644 index 00000000000..0f451e2753f --- /dev/null +++ b/docs/0.41.1/guides/deploying-on-lambda/index.html @@ -0,0 +1,26 @@ + + + + + +Deploying on AWS Lambda | Platformatic Open Source Software + + + + + +
+
Version: 0.41.1

Deploying on AWS Lambda

It is possible to deploy Platformatic applications to AWS Lambda +by leveraging @fastify/aws-lambda.

Once you set up your Platformatic DB application, such as following +our tutorial, you can create a +server.mjs file as follows:

import awsLambdaFastify from '@fastify/aws-lambda'
import { buildServer } from '@platformatic/db'

const app = await buildServer('./platformatic.db.json')
// You can use the same approach with both Platformatic DB and
// and service
// const app = await buildServer('./platformatic.service.json')

// The following also work for Platformatic Service applications
// import { buildServer } from '@platformatic/service'
export const handler = awsLambdaFastify(app)

// Loads the Application, must be after the call to `awsLambdaFastify`
await app.ready()

This would be the entry point for your AWS Lambda function.

Avoiding cold start

Caching the DB schema

If you use Platformatic DB, you want to turn on the schemalock +configuration to cache the schema +information on disk.

Set the db.schemalock configuration to true, start the application, +and a schema.lock file should appear. Make sure to commit that file and +deploy your lambda.

Provisioned concurrency

Since AWS Lambda now enables the use of ECMAScript (ES) modules in Node.js 14 runtimes, +you could lower the cold start latency when used with Provisioned Concurrency +thanks to the top-level await functionality. (Excerpt taken from @fastify/aws-lambda)

+ + + + \ No newline at end of file diff --git a/docs/0.41.1/guides/deployment/advanced-fly-io-deployment/index.html b/docs/0.41.1/guides/deployment/advanced-fly-io-deployment/index.html new file mode 100644 index 00000000000..c88b9f21415 --- /dev/null +++ b/docs/0.41.1/guides/deployment/advanced-fly-io-deployment/index.html @@ -0,0 +1,22 @@ + + + + + +Advanced Fly.io Deployment | Platformatic Open Source Software + + + + + +
+
Version: 0.41.1

Advanced Fly.io Deployment

Techniques used in this guide are based on the Deploy to Fly.io with SQLite +deployment guide.

Adding sqlite for debugging

With a combination of Docker and Fly.io, you can create an easy way to debug your sqlite application without stopping your application or exporting the data. At the end of this guide, you will be able to run fly ssh console -C db-cli to be dropped into your remote database.

Start by creating a script for launching the database, calling it db-cli.sh:

#!/bin/sh
set -x
# DSN will be defined in the Dockerfile
sqlite3 $DSN

Create a new Dockerfile which will act as the build and deployment image:

FROM node:18-alpine

# Setup sqlite viewer
RUN apk add sqlite
ENV DSN "/app/.platformatic/data/app.db"
COPY db-cli.sh /usr/local/bin/db-cli
RUN chmod +x /usr/local/bin/db-cli

WORKDIR /app
COPY package.json package.json
COPY package-lock.json package-lock.json

RUN npm ci --omit=dev

COPY platformatic.db.json platformatic.db.json

COPY migrations migrations
# Uncomment if your application is running a plugin
# COPY plugin.js plugin.js

EXPOSE 8080

CMD ["npm", "start"]

Add a start script to your package.json:

{
"scripts": {
"start": "platformatic db"
}
}

With Fly, it becomes straightforward to connect directly to the database by +running the following command from your local machine:

fly ssh console -C db-cli
+ + + + \ No newline at end of file diff --git a/docs/0.41.1/guides/deployment/deploy-to-fly-io-with-sqlite/index.html b/docs/0.41.1/guides/deployment/deploy-to-fly-io-with-sqlite/index.html new file mode 100644 index 00000000000..84faec33b89 --- /dev/null +++ b/docs/0.41.1/guides/deployment/deploy-to-fly-io-with-sqlite/index.html @@ -0,0 +1,33 @@ + + + + + +Deploy to Fly.io with SQLite | Platformatic Open Source Software + + + + + +
+
Version: 0.41.1

Deploy to Fly.io with SQLite

note

To follow this how-to guide, you'll first need to install the Fly CLI and create +an account by following this official guide. +You will also need an existing Platformatic DB project, please check out our +getting started guide if needed.

Navigate to your Platformatic DB project in the terminal on your local machine. +Run fly launch and follow the prompts. When it asks if you want to deploy +now, say "no" as there are a few things that you'll need to configure first.

You can also create the fly application with one line. This will create your +application in London (lhr):

fly launch --no-deploy --generate-name --region lhr --org personal --path .

The fly CLI should have created a fly.toml file in your project +directory.

Explicit builder

The fly.toml file may be missing an explicit builder setting. To have +consistent builds, it is best to add a build section:

[build]
builder = "heroku/buildpacks:20"

Database storage

Create a volume for database storage, naming it data:

fly volumes create data

This will create storage in the same region as the application. The volume +defaults to 3GB size, use -s to change the size. For example, -s 10 is 10GB.

Add a mounts section in fly.toml:

[mounts]
source = "data"
destination = "/app/.platformatic/data"

Create a directory in your project where your SQLite database will be created:

mkdir -p .platformatic/data

touch .platformatic/data/.gitkeep

The .gitkeep file ensures that this directory will always be created when +your application is deployed.

You should also ensure that your SQLite database is ignored by Git. This helps +avoid inconsistencies when your application is deployed:

echo "*.db" >> .gitignore

The command above assumes that your SQLite database file ends with the extension +.db — if the extension is different then you must change the command to match.

Change the connection string to an environment variable and make sure that +migrations are autoApplying (for platformatic@^0.4.0) in platformatic.db.json:

{
"db": {
"connectionString": "{DATABASE_URL}"
},
"migrations": {
"dir": "./migrations",
"autoApply": true
}
}

Configure server

Make sure that your platformatic.db.json uses environment variables +for the server section:

{
"server": {
"logger": {
"level": "{PLT_SERVER_LOGGER_LEVEL}"
},
"hostname": "{PLT_SERVER_HOSTNAME}",
"port": "{PORT}"
}
}

Configure environment

Start with your local environment, create a .env file and put the following:

PORT=3042
PLT_SERVER_HOSTNAME=127.0.0.1
PLT_SERVER_LOGGER_LEVEL=debug
DATABASE_URL=sqlite://.platformatic/data/movie-quotes.db

Avoid accidental leaks by ignoring your .env file:

echo ".env" >> .gitignore

This same configuration needs to be added to fly.toml:

[env]
PORT = 8080
PLT_SERVER_HOSTNAME = "0.0.0.0"
PLT_SERVER_LOGGER_LEVEL = "info"
DATABASE_URL = "sqlite:///app/.platformatic/data/movie-quotes.db"

Deploy application

A valid package.json will be needed so if you do not have one, generate one +by running npm init.

In your package.json, make sure there is a start script to run your +application:

{
"scripts": {
"start": "platformatic db"
}
}

Before deploying, make sure a .dockerignore file is created:

cp .gitignore .dockerignore

Finally, deploy the application to Fly by running:

fly deploy
+ + + + \ No newline at end of file diff --git a/docs/0.41.1/guides/deployment/index.html b/docs/0.41.1/guides/deployment/index.html new file mode 100644 index 00000000000..567dbcb7453 --- /dev/null +++ b/docs/0.41.1/guides/deployment/index.html @@ -0,0 +1,46 @@ + + + + + +Deployment | Platformatic Open Source Software + + + + + +
+
Version: 0.41.1

Deployment

Applications built with Platformatic DB can be deployed to a hosting service +in the same way as any other Node.js application. This guide covers a few +things that will help smooth the path from development to production.

Running a Platformatic DB application

Make the Platformatic CLI available

To run a Platformatic DB application, the Platformatic CLI must be available +in the production environment. The most straightforward way of achieving this +is to install it as a project dependency. +This means that when npm install (or npm ci) is run as part of your +build/deployment process, the Platformatic CLI will be installed.

Define an npm run script

A number of hosting services will automatically detect if your project's +package.json has a start npm run script. They will then execute the command +npm start to run your application in production.

You can add platformatic db start as the command for your project's start +npm run script, for example:

{
...
"scripts": {
"start": "platformatic db start",
},
}

Server configuration

info

See the Configuration reference for all +configuration settings.

Configuration with environment variables

We recommend that you use environment variable placeholders +in your Platformatic DB configuration. This will allow you to configure +different settings in your development and production environments.

In development you can set the environment variables via a .env file +that will be automatically loaded by Platformatic DB. For example:

PORT=3042
PLT_SERVER_HOSTNAME=127.0.0.1

In production your hosting provider will typically provide their own mechanism +for setting environment variables.

Configure the server port

Configure the port that the server will listen on by setting an environment +variable placeholder in your Platformatic DB configuration file:

platformatic.db.json
{
"server": {
...
"port": "{PORT}"
},
...
}

Listen on all network interfaces

Most hosting providers require that you configure your server to bind to all +available network interfaces. To do this you must set the server hostname to +0.0.0.0.

This can be handled with an environment variable placeholder in your Platformatic +DB configuration file:

platformatic.db.json
{
"server": {
...
"hostname": "{PLT_SERVER_HOSTNAME}",
},
...
}

The environment variable PLT_SERVER_HOSTNAME should then be set to 0.0.0.0 +in your hosting environment.

Security considerations

We recommend disabling the GraphiQL web UI in production. It can be disabled +with the following configuration:

platformatic.db.json
{
"db": {
...
"graphql": {
"graphiql": false
}
},
...
}

If you want to use this feature in development, replace the configuration +values with environment variable placeholders +so you can set it to true in development and false in production.

Removing the welcome page

If you want to remove the welcome page, you should register an index route.

module.exports = async function (app) {
// removing the welcome page
app.get('/', (req, reply) => {
return { hello: 'world' }
})
}

Databases

Applying migrations

If you're running a single instance of your application in production, it's best to allow Platformatic DB to automatically run migrations when the server starts. This reduces the chance of a currently running instance using a database structure it doesn't understand while the new version is still being deployed.

SQLite

When using an SQLite database, you can ensure you don’t commit it to your Git +repository by adding the SQLite database filename to your .gitignore file. +The SQLite database file will be automatically generated by Platformatic DB +when your application migrations are run in production.

+ + + + \ No newline at end of file diff --git a/docs/0.41.1/guides/dockerize-platformatic-app/index.html b/docs/0.41.1/guides/dockerize-platformatic-app/index.html new file mode 100644 index 00000000000..9b96bbe056b --- /dev/null +++ b/docs/0.41.1/guides/dockerize-platformatic-app/index.html @@ -0,0 +1,20 @@ + + + + + +Dockerize a Platformatic App | Platformatic Open Source Software + + + + + +
+
Version: 0.41.1

Dockerize a Platformatic App

This guide explains how to create a new Platformatic DB app, which connects to a PostgreSQL database.

We will then create a docker-compose.yml file that will run both services in separate containers

Generate a Platformatic DB App

Run this command in your terminal to start the Platformatic creator wizard:

npm create platformatic@latest

This interactive command-line tool will ask you some questions about how you'd +like to set up your new Platformatic project. For this guide, select these options:

- Which kind of project do you want to create?  => DB
- Where would you like to create your project? => quick-start
- Do you want to create default migrations? => Yes
- Do you want to create a plugin? => Yes
- Do you want to use TypeScript? => No
- Do you want to install dependencies? => Yes (this can take a while)
- Do you want to apply the migrations? => Yes
- Do you want to generate types? => Yes
- Do you want to create the github action to deploy this application to Platformatic Cloud dynamic workspace? => No
- Do you want to create the github action to deploy this application to Platformatic Cloud static workspace? => No

Once the wizard is complete, you'll have a Platformatic app project in the +folder quick-start, with example migration files and a plugin script.

info

Make sure you run the npm/yarn/pnpm install command manually if you don't ask the wizard to do it for you.

Create Docker image for the Platformatic DB App

In this step you are going to create some files into the root project directory

  • .dockerignore - This file tells Docker to ignore some files when copying the directory into the image filesystem
node_modules
.env*
  • start.sh - This is our entrypoint. We will run migrations then start platformatic
#!/bin/sh

echo "Running migrations..." && \
npx platformatic db migrations apply && \
echo "Starting Platformatic App..." && \
npm start
info

Make sure you make this file executable with the command chmod +x start.sh

  • Dockerfile - This is the file Docker uses to create the image
FROM node:18-alpine
WORKDIR /usr/src/app
COPY . .
RUN npm install
COPY . .
EXPOSE 3042
CMD [ "./start.sh" ]

At this point you can build your Docker image with the command

$ docker build -t platformatic-app .

Create Docker Compose config file

docker-compose.yml is the configuration file for docker-compose which will spin up containers for both PostgreSQL and our Platformatic App

version: "3.3"
services:
postgresql:
ports:
- "5433:5432"
image: "postgres:15-alpine"
environment:
- POSTGRES_PASSWORD=postgres
platformatic:
ports:
- "3042:3042"
image: 'platformatic-app:latest'
depends_on:
- postgresql
links:
- postgresql
environment:
PLT_SERVER_HOSTNAME: ${PLT_SERVER_HOSTNAME}
PORT: ${PORT}
PLT_SERVER_LOGGER_LEVEL: ${PLT_SERVER_LOGGER_LEVEL}
DATABASE_URL: postgres://postgres:postgres@postgresql:5432/postgres

A couple of things to notice:

  • The Platformatic app is started only once the database container is up and running (depends_on).
  • The Platformatic app is linked with postgresql service. Meaning that inside its container ping postgresql will be resolved with the internal ip of the database container.
  • The environment is taken directly from the .env file created by the wizard

You can now run your containers with

$ docker-compose up # (-d if you want to send them in the background)

Everything should start smoothly, and you can access your app pointing your browser to http://0.0.0.0:3042

To stop the app you can either press CTRL-C if you are running them in the foreground, or, if you used the -d flag, run

$ docker-compose down
+ + + + \ No newline at end of file diff --git a/docs/0.41.1/guides/generate-frontend-code-to-consume-platformatic-rest-api/index.html b/docs/0.41.1/guides/generate-frontend-code-to-consume-platformatic-rest-api/index.html new file mode 100644 index 00000000000..51be7ec3edc --- /dev/null +++ b/docs/0.41.1/guides/generate-frontend-code-to-consume-platformatic-rest-api/index.html @@ -0,0 +1,32 @@ + + + + + +Generate Front-end Code to Consume Platformatic REST API | Platformatic Open Source Software + + + + + +
+
Version: 0.41.1

Generate Front-end Code to Consume Platformatic REST API

By default, a Platformatic app exposes REST API that provide CRUD (Create, Read, +Update, Delete) functionality for each entity (see the +Introduction to the REST API +documentation for more information on the REST API).

The Platformatic CLI allows you to auto-generate the front-end code to import into your front-end application to consume the Platformatic REST API.

This guide

  • Explains how to create a new Platformatic app.
  • Explains how to configure the new Platformatic app.
  • Explains how to create a new React or Vue.js front-end application.
  • Explains how to generate the front-end TypeScript code to consume the Platformatic app REST API.
  • Provides some React and Vue.js components (either of them written in TypeScript) that read, create, and update an entity.
  • Explains how to import the new component in your front-end application.

Create a new Platformatic app

Run this command in your terminal to start the Platformatic creator wizard:

npm create platformatic@latest

This interactive command-line tool will ask you some questions about how you'd +like to set up your new Platformatic project. For this guide, select these options:

- Which kind of project do you want to create?  => DB
- Where would you like to create your project? => quick-start
- Do you want to create default migrations? => Yes
- Do you want to create a plugin? => Yes
- Do you want to use TypeScript? => No
- Do you want to install dependencies? => Yes (this can take a while)
- Do you want to apply the migrations? => Yes
- Do you want to generate types? => Yes
- Do you want to create the github action to deploy this application to Platformatic Cloud dynamic workspace? => No
- Do you want to create the github action to deploy this application to Platformatic Cloud static workspace? => No

Once the wizard is complete, you'll have a Platformatic app project in the +folder quick-start, with example migration files and a plugin script.

info

Make sure you run the npm/yarn/pnpm install command manually if you don't ask the wizard to do it for you.

Configure the new Platformatic app

documentation to create a new Platformatic app. Every Platformatic app uses the "Movie" demo entity and includes +the corresponding table, migrations, and REST API to create, read, update, and delete movies.

Once the new Platformatic app is ready:

  • Set up CORS in platformatic.db.json
{
"$schema": "https://platformatic.dev/schemas/v0.24.0/db",
"server": {
"hostname": "{PLT_SERVER_HOSTNAME}",
"port": "{PORT}",
"logger": {
"level": "{PLT_SERVER_LOGGER_LEVEL}"
},
+ "cors": {
+ "origin": {
+ "regexp": "/*/"
+ }
+ }
},
...
}

You can find more details about the cors configuration here.

  • launch Platformatic through npm start. +Then, the Platformatic app should be available at the http://127.0.0.1:3042/ URL.

Create a new Front-end Application

Refer to the Scaffolding Your First Vite Project +documentation to create a new front-end application, and call it "rest-api-frontend".

info

Please note Vite is suggested only for practical reasons, but the bundler of choice does not make any difference.

If you are using npm 7+ you should run

npm create vite@latest rest-api-frontend -- --template react-ts

and then follow the Vite's instructions

Scaffolding project in /Users/noriste/Sites/temp/platformatic/rest-api-frontend...

Done. Now run:

cd rest-api-frontend
npm install
npm run dev

Once done, the front-end application is available at http://localhost:5174/.

Generate the front-end code to consume the Platformatic app REST API

Now that both the Platformatic app and the front-end app are running, go to the front-end codebase and run the Platformatic CLI

cd rest-api-frontend/src
npx platformatic frontend http://127.0.0.1:3042 ts

Refer to the Platformatic CLI frontend command +documentation to know about the available options.

The Platformatic CLI generates

  • api.d.ts: A TypeScript module that includes all the OpenAPI-related types. +Here is part of the generated code
interface GetMoviesRequest {
'limit'?: number;
'offset'?: number;
// ... etc.
}

interface GetMoviesResponseOK {
'id'?: number;
'title': string;
}


// ... etc.

export interface Api {
setBaseUrl(baseUrl: string): void;
getMovies(req: GetMoviesRequest): Promise<Array<GetMoviesResponseOK>>;
createMovie(req: CreateMovieRequest): Promise<CreateMovieResponseOK>;
// ... etc.
}
  • api.ts: A TypeScript module that includes a typed function for every single OpenAPI endpoint. +Here is part of the generated code
import type { Api } from './api-types'

let baseUrl = ''
export function setBaseUrl(newUrl: string) { baseUrl = newUrl };

export const createMovie: Api['createMovie'] = async (request) => {
const response = await fetch(`${baseUrl}/movies/`, {
method:'post',
body: JSON.stringify(request),
headers: {
'Content-Type': 'application/json'
}
})

if (!response.ok) {
throw new Error(await response.text())
}

return await response.json()
}

// etc.

You can add a --name option to the command line to provide a custom name for the generated files.

cd rest-api-frontend/src
npx platformatic frontend --name foobar http://127.0.0.1:3042 ts

will generate foobar.ts and foobar-types.d.ts

React and Vue.js components that read, create, and update an entity

You can copy/paste the following React or Vue.js components that import the code +the Platformatic CLI generated.

Create a new file src/PlatformaticPlayground.tsx and copy/paste the following code.

import { useEffect, useState } from 'react'

// getMovies, createMovie, and updateMovie are all functions automatically generated by Platformatic
// in the `api.ts` module.
import { getMovies, createMovie, updateMovie, setBaseUrl } from './api'

setBaseUrl('http://127.0.0.1:3042') // configure this according to your needs

export function PlatformaticPlayground() {
const [movies, setMovies] = useState<Awaited<ReturnType<typeof getMovies>>>([])
const [newMovie, setNewMovie] = useState<Awaited<ReturnType<typeof createMovie>>>()

async function onCreateMovie() {
const newMovie = await createMovie({ title: 'Harry Potter' })
setNewMovie(newMovie)
}

async function onUpdateMovie() {
if (!newMovie || !newMovie.id) return

const updatedMovie = await updateMovie({ id: newMovie.id, title: 'The Lord of the Rings' })
setNewMovie(updatedMovie)
}

useEffect(() => {
async function fetchMovies() {
const movies = await getMovies({})
setMovies(movies)
}

fetchMovies()
}, [])

return (
<>
<h2>Movies</h2>

{movies.length === 0 ? (
<div>No movies yet</div>
) : (
<ul>
{movies.map((movie) => (
<li key={movie.id}>{movie.title}</li>
))}
</ul>
)}

<button onClick={onCreateMovie}>Create movie</button>
<button onClick={onUpdateMovie}>Update movie</button>

{newMovie && <div>Title: {newMovie.title}</div>}
</>
)
}

Import the new component in your front-end application

You need to import and render the new component in the front-end application.

Change the App.tsx as follows

import { useState } from 'react'
import reactLogo from './assets/react.svg'
import viteLogo from '/vite.svg'
import './App.css'

+import { PlatformaticPlayground } from './PlatformaticPlayground'

function App() {
const [count, setCount] = useState(0)

return (
<>
+ <PlatformaticPlayground />
<div>
<a href="https://vitejs.dev" target="_blank">
<img src={viteLogo} className="logo" alt="Vite logo" />
</a>
<a href="https://react.dev" target="_blank">
<img src={reactLogo} className="logo react" alt="React logo" />
</a>
</div>
<h1>Vite + React</h1>
<div className="card">
<button onClick={() => setCount((count) => count + 1)}>count is {count}</button>
<p>
Edit <code>src/App.tsx</code> and save to test HMR
</p>
</div>
<p className="read-the-docs">Click on the Vite and React logos to learn more</p>
</>
)
}

export default App

Have fun

At the top of the front-end application, the new component requests the movies from the Platformatic app and lists them.

Platformatic frontend guide: listing the movies

Click on "Create movie" to create a new movie called "Harry Potter".

Platformatic frontend guide: creating a movie

Click on "Update movie" to rename "Harry Potter" into "Lord of the Rings".

Platformatic frontend guide: editing a movie

Reload the front-end application to see the new "Lord of the Rings" movie listed.

Platformatic frontend guide: listing the movies +.

+ + + + \ No newline at end of file diff --git a/docs/0.41.1/guides/jwt-auth0/index.html b/docs/0.41.1/guides/jwt-auth0/index.html new file mode 100644 index 00000000000..81875f91de7 --- /dev/null +++ b/docs/0.41.1/guides/jwt-auth0/index.html @@ -0,0 +1,21 @@ + + + + + +Configure JWT with Auth0 | Platformatic Open Source Software + + + + + +
+
Version: 0.41.1

Configure JWT with Auth0

Auth0 is a powerful authentication and authorization service provider that can be integrated with Platformatic DB through JSON Web Tokens (JWT) tokens. When a user is authenticated, Auth0 creates a JWT token with all necessary security information and custom claims (like the X-PLATFORMATIC-ROLE, see User Metadata) and signs the token.

Platformatic DB needs the correct public key to verify the JWT signature. The fastest way is to leverage JWKS, since Auth0 exposes a JWKS endpoint for each tenant. Given an Auth0 tenant's issuer URL, the (public) keys are accessible at ${issuer}/.well-known/jwks.json. For instance, if issuer is: https://dev-xxx.us.auth0.com/, the public keys are accessible at https://dev-xxx.us.auth0.com/.well-known/jwks.json

To configure Platformatic DB authorization to use JWKS with Auth0, set:


...
"authorization": {
"jwt": {
"jwks": {
"allowedDomains": [
"https://dev-xxx.us.auth0.com/"
]
}
},
}
...

danger

Note that specifying allowedDomains is critical to correctly restrict the accepted JWTs, which MUST be issued from one of the allowed domains.

Custom Claim Namespace

In Auth0 there are restrictions about the custom claim that can be set on access tokens. One of these is that the custom claims MUST be namespaced, i.e. we cannot have X-PLATFORMATIC-ROLE but we must specify a namespace, e.g.: https://platformatic.dev/X-PLATFORMATIC-ROLE

To map these claims to user metadata removing the namespace, we can specify the namespace in the JWT options:

...
"authorization": {
"jwt": {
"namespace": "https://platformatic.dev/",
"jwks": {
"allowedDomains": [
"https://dev-xxx.us.auth0.com/"
]
}
},
}
...

With this configuration, the https://platformatic.dev/X-PLATFORMATIC-ROLE claim is mapped to X-PLATFORMATIC-ROLE user metadata.

+ + + + \ No newline at end of file diff --git a/docs/0.41.1/guides/migrating-express-app-to-platformatic-service/index.html b/docs/0.41.1/guides/migrating-express-app-to-platformatic-service/index.html new file mode 100644 index 00000000000..e57f4e408e4 --- /dev/null +++ b/docs/0.41.1/guides/migrating-express-app-to-platformatic-service/index.html @@ -0,0 +1,17 @@ + + + + + +Migrating an Express app to Platformatic Service | Platformatic Open Source Software + + + + + +
+
Version: 0.41.1

Migrating an Express app to Platformatic Service

Introduction

Our open-source tools are built on top of the modern and flexible Fastify web framework. It provides logging, request validation and a powerful plugin system out-of-the-box, as well as incredible performance.

If you have an existing Express application, migrating it to Fastify could potentially be time consuming, and might not be something that you're able to prioritise right now. You can however still take advantage of Fastify and our open-source tools. In this guide you'll learn how to use the @fastify/express plugin to help you rapidly migrate your existing Express application to use Platformatic Service.

This guide assumes that you have some experience building applications with the Express framework.

Example Express application

For the purpose of this guide, we have a basic example Express application. Although this app has a specific structure, the migration steps covered in this guide can generally be applied to any Express application.

The code for the example Express and migrated Platformatic Service applications is available on GitHub.

Here's the structure of the example Express application:

├── app.js
├── package.json
├── routes
│ └── users.js
└── server.js

It has the following dependencies:

// package.json

"dependencies": {
"express": "^4.18.2"
}

The application has routes in routes/users.js:

// routes/users.js

import express from 'express'

const router = express.Router()

router.use(express.json())

router.post('/', function createUser(request, response, next) {
const newUser = request.body

if (!newUser) {
return next(new Error('Error creating user'))
}

response.status(201).json(newUser)
})

router.get('/:user_id', function getUser(request, response, next) {
const user = {
id: Number(request.params.user_id),
first_name: 'Bobo',
last_name: 'Oso'
}

response.json(user)
})

export const usersRoutes = router

In app.js, we have a factory function that creates a new Express server instance and mounts the routes:

// app.js

import express from 'express'

import { usersRoutes } from './routes/users.js'

export default function buildApp() {
const app = express()

app.use('/users', usersRoutes)

return app
}

And in server.js we're calling the factory function and starting the server listening for HTTP requests:

// server.js

import buildApp from './app.js'

const express = buildApp()

express.listen(3042, () => {
console.log('Example app listening at http://localhost:3042')
})

The routes in your Express application should be mounted on an Express router (or multiple routers if needed). This will allow them to be mounted using @fastify/express when you migrate your app to Platformatic Service.

Creating a new Platformatic Service app

To migrate your Express app to Platformatic Service, create a new Platformatic Service app with:

npm create platformatic@latest

Be sure to select Service as the project type. You should also say yes when you're asked if you want to create the GitHub Actions workflows for deploying your application to Platformatic Cloud.

Once the project has been created, you can delete the example plugins and routes directories.

Using ES modules

If you're using ES modules in the Express application code that you'll be migrating, ensure that there's a type field in package.json set to module:

npm pkg set type=module

Migrate the Express routes

Copy over the routes directory from your Express app.

Install @fastify/express

Install the @fastify/express Fastify plugin to add full Express compatibility to your Platformatic Service app:

npm install @fastify/express

Mounting the Express routes

Create a root Fastify plugin that registers the @fastify/express plugin and loads your Express routes:

// root-plugin.js

import { usersRoutes } from './routes/users.js'

/** @param {import('fastify').FastifyInstance} app */
export default async function (app) {
await app.register(import('@fastify/express'))

app.use('/users', usersRoutes)
}

Configuring the Platformatic Service app

Edit your app's platformatic.service.json to load your root plugin:

// platformatic.service.json

{
...,
"plugins": {
"paths": [{
"path": "./root-plugin.js",
"encapsulate": false
}],
"hotReload": false
},
"watch": false
}

These settings are important when using @fastify/express in a Platformatic Service app:

  • encapsulate — You'll need to disable encapsulation for any Fastify plugin which mounts Express routes. This is due to the way that @fastify/express works.
  • hotReload and watch — You'll need to disable hot reloading and watching for your app, as they don't currently work when using @fastify/express. This is a known issue that we're working to fix.

Wrapping up

You can learn more about building Node.js apps with Platformatic service in the Platformatic Service documentation.

Once you've migrated your Express app to use Platformatic Service with @fastify/express, you might then want to consider fully migrating your Express routes and application code to Fastify. This tutorial shows how you can approach that migration process: How to migrate your app from Express to Fastify (video).

+ + + + \ No newline at end of file diff --git a/docs/0.41.1/guides/migrating-fastify-app-to-platformatic-service/index.html b/docs/0.41.1/guides/migrating-fastify-app-to-platformatic-service/index.html new file mode 100644 index 00000000000..b77fbfa13ba --- /dev/null +++ b/docs/0.41.1/guides/migrating-fastify-app-to-platformatic-service/index.html @@ -0,0 +1,17 @@ + + + + + +Migrating a Fastify app to Platformatic Service | Platformatic Open Source Software + + + + + +
+
Version: 0.41.1

Migrating a Fastify app to Platformatic Service

Introduction

Building production ready Node.js application with Fastify can require a certain amount of boilerplate code. This is a side effect of some of Fastify's technical principles:

  • If it can be a plugin, it should be a plugin — Plugins help with the separation of concerns, they improve testability, and also provide a way to logically organise and structure your applications.
  • Developer choice = developer freedom — Fastify only applies a few strong opinions, in key areas such as logging and validation. The framework features have been designed to give you the freedom to build your applications however you want.
  • You know your needs best — Fastify doesn't make assumptions about what plugins you'll need in your application. As the Fastify plugin ecosystem and the community has grown, a clear group of popular plugin choices has emerged.

Platformatic Service is the natural evolution of the build-it-from-scratch Fastify development experience. It provides a solid foundation for building Node.js applications on top of Fastify, with best practices baked in.

See the Building apps with Platformatic Service section of this guide to learn more about the built-in features.

The good news is that the path to migrate a Fastify application to use Platformatic Service is fairly straightforward. This guide covers some of the things you'll need to know when migrating an application, as well as tips on different migration approaches.

This guide assumes that you have some experience building applications with the Fastify framework. If you'd like to learn more about building web applications with Fastify, we recommend taking a look at:

Example Fastify application

For the purpose of this guide, we have a basic example Fastify application. Although this app has a specific structure, the migration steps covered in this guide can generally be applied to any Fastify application.

The code for the example Fastify and migrated Platformatic Service applications is available on GitHub.

Here's the structure of the example Fastify application:

├── app.js
├── package.json
├── plugins
│   └── data-source.js
├── routes
│   ├── movies.js
│   └── quotes.js
├── server.js
└── test
└── routes.test.js

It has the following dependencies:

// package.json

"dependencies": {
"fastify": "^4.17.0",
"fastify-plugin": "^4.5.0"
}

The application has a plugin that decorates the Fastify server instance, as well as two Fastify plugins which define API routes. Here's the code for them:

// plugins/data-source.js

import fastifyPlugin from 'fastify-plugin'

/** @param {import('fastify').FastifyInstance} app */
async function dataSource (app) {
app.decorate('movies', [
'Jaws',
'Star Wars',
'The Wizard of Oz'
])

app.decorate('quotes', [
'You\'re gonna need a bigger boat.',
'May the Force be with you.',
'Toto, I\'ve got a feeling we\'re not in Kansas anymore.'
])
}

export default fastifyPlugin(dataSource)

fastify-plugin is used to prevent Fastify from creating a new encapsulation context for the plugin. This makes the decorators that are registered in the dataSource plugin available in the route plugins. You can learn about this fundamental Fastify concept in the Fastify Encapsulation documentation.

// routes/movies.js

/** @param {import('fastify').FastifyInstance} app */
export default async function movieRoutes (app) {
app.get('/', async (request, reply) => {
return app.movies
})
}
// routes/quotes.js

/** @param {import('fastify').FastifyInstance} app */
export default async function quotesRoutes (app) {
app.get('/', async (request, reply) => {
return app.quotes
})
}

The route plugins aren't registering anything that needs to be available in other plugins. They have their own encapsulation context and don't need to be wrapped with fastify-plugin.

There's also a buildApp() factory function in app.js, which takes care of creating a new Fastify server instance and registering the plugins and routes:

// app.js

import fastify from 'fastify'

export async function buildApp (options = {}) {
const app = fastify(options)

app.register(import('./plugins/data-source.js'))

app.register(import('./routes/movies.js'), { prefix: '/movies' })
app.register(import('./routes/quotes.js'), { prefix: '/quotes' })

return app
}

And server.js, which calls the buildApp function to create a new Fastify server, and then starts it listening:

// server.js

import { buildApp } from './app.js'

const port = process.env.PORT || 3042
const host = process.env.HOST || '127.0.0.1'

const options = {
logger: {
level: 'info'
}
}

const app = await buildApp(options)

await app.listen({ port, host })

As well as a couple of tests for the API routes:

// tests/routes.test.js

import { test } from 'node:test'
import assert from 'node:assert/strict'

import { buildApp } from '../app.js'

test('Basic API', async (t) => {
const app = await buildApp()

t.after(async () => {
await app.close()
})

await t.test('GET request to /movies route', async () => {
const response = await app.inject({
method: 'GET',
url: '/movies'
})

assert.equal(response.statusCode, 200)
assert.deepEqual(response.json(), [
'Jaws',
'Star Wars',
'The Wizard of Oz'
])
})

await t.test('GET request to /quotes route', async () => {
const response = await app.inject({
method: 'GET',
url: '/quotes'
})

assert.equal(response.statusCode, 200)
assert.deepEqual(response.json(), [
'You\'re gonna need a bigger boat.',
'May the Force be with you.',
'Toto, I\'ve got a feeling we\'re not in Kansas anymore.'
])
})
})

These tests are using the built in Node.js test runner, node:test. They can be run with the command: node --test --test-reporter=spec test/*.test.js.

The @param lines in this application code are JSDoc blocks that import the FastifyInstance type. This allows many code editors to provide auto-suggest, type hinting and type checking for your code.

Creating a new Platformatic Service app

To migrate your Fastify app to Platformatic Service, create a new Platformatic Service app with:

npm create platformatic@latest

Be sure to select Service as the project type. Once the project has been created, you can delete the example plugins and routes directories.

App configuration

The configuration for the Platformatic Service app is stored in platformatic.service.json.

The generated configuration is set up to load plugins from the plugins and routes directories:

// platformatic.service.json

"plugins": {
"paths": [
"./plugins",
"./routes"
]
}

The value for any configuration setting in platformatic.service.json can be replaced with an environment variable by adding a placeholder, for example {PLT_SERVER_LOGGER_LEVEL}. In development, environment variables are automatically loaded by your Platformatic Service app from a .env file in the root directory of your app. In production, you'll typically set these environment variables using a feature provided by your hosting provider.

See the Platformatic Service documentation for Environment variable placeholders to learn more about how this works.

Using ES modules

If you're using ES modules in the Fastify application code that you'll be migrating, ensure that there's a type field in package.json set to module:

npm pkg set type=module

Refactoring Fastify server factories

If your Fastify application has a script with a factory function to create and build up a Fastify server instance, you can refactor it into a Fastify plugin and use it in your Platformatic Service app.

Here are a few things to consider while refactoring it:

  • Move the options you're passing to Fastify when creating a new server instance to the server block in platformatic.service.json. These options will be passed through directly by Platformatic Service when it creates a Fastify server instance.
  • You can create a root plugin to be loaded by your Platformatic Service app, for example: export default async function rootPlugin (app, options) { ... }
  • When you copy the code from your factory function into your root plugin, remove the code which is creating the Fastify server instance.
  • You can configure your Platformatic Service to load the root plugin, for example:
    "plugins": {
    "paths": ["./root-plugin.js"]
    }
  • If you need to pass options to your root plugin, you can do it like this:
    "plugins": {
    "paths": [
    {
    "path": "./root-plugin.js",
    "options": {
    "someOption": true
    }
    }
    ]
    }

Migrating plugins

Copy over the plugins directory from your Fastify app. You shouldn't need to make any modifications for them to work with Platformatic Service.

Disabling plugin encapsulation

Platformatic Service provides a configuration setting which enables you to disable encapsulation for a plugin, or all the plugins within a directory. This will make any decorators or hooks that you set in those plugins available to all other plugins. This removes the need for you to wrap your plugins with fastify-plugin.

To disable encapsulation for all plugins within the plugins directory, you would set your plugins configuration like this in platformatic.service.json:

// platformatic.service.json

"plugins": {
"paths": [
{
"path": "./plugins",
"encapsulate": false
},
"./routes"
]
}

You can learn more about plugin encapsulation in the Fastify Plugins Guide.

Migrating routes

Copy over the routes directory from your Fastify app.

Explicit route paths

If you're registering routes in your Fastify application with full paths, for example /movies, you won't need to make any changes to your route plugins.

Route prefixing with file-system based routing

If you're using the prefix option when registering route plugins in your Fastify application, for example:

app.register(import('./routes/movies.js'), { prefix: '/movies' })

You can achieve the same result with Platformatic Service by using file-system based routing. With the following directory and file structure:

routes/
├── movies
│   └── index.js
└── quotes
└── index.js

Assuming that both of the route files register a / route, these are the route paths that will be registered in your Platformatic Service app:

/movies
/quotes

With the example Fastify application, this would mean copying the route files over to these places in the Platformatic Service app:

routes/movies.js -> routes/movies/index.js
routes/quotes.js -> routes/quotes/index.js

How does this work? Plugins are loaded with the @fastify/autoload Fastify plugin. The dirNameRoutePrefix plugin option for @fastify/autoload is enabled by default. This means that "routes will be automatically prefixed with the subdirectory name in an autoloaded directory".

If you'd prefer not to use file-system based routing with Platformatic Service, you can add prefixes to the paths for the routes themselves (see Explicit route paths).

Adapting existing usage of @fastify/autoload

If you're using @fastify/autoload in your Fastify application, there are a couple of approaches you can take when migrating the app to Platformatic Service:

  • Configure plugins in your Platformatic Service app's platformatic.service.json. It will then take care of loading your routes and plugins for you with @fastify/autoload (configuration documentation).
  • You can continue to use @fastify/autoload directly with a little refactoring. See the tips in the Refactoring Fastify server factories section.

Migrating tests

You'll generally use the Platformatic CLI to start your Platformatic Service app (npx platformatic start). However for testing, you can use the programmatic API provided by Platformatic Service. This allows you to load your app in your test scripts and then run tests against it.

If you copy over the tests from your existing Fastify app, they will typically only require a small amount of refactoring to work with Platformatic Service.

Replacing your Fastify server factory function

The example Fastify app has a buildApp() factory function which creates a Fastify server instance. The import line for that function can be removed from tests/routes.test.js:

// tests/routes.test.js

import { buildApp } from '../app.js'

And replaced with an import of the buildServer() function from @platformatic/service:

// tests/routes.test.js

import { buildServer } from '@platformatic/service'

You can then load your Platformatic Service app like this:


const app = await buildServer('./platformatic.service.json')

Disabling server logging in your tests

If you have logging enabled for your Platformatic Service app, you'll probably want to disable the logging in your tests to remove noise from the output that you receive when you run your tests.

Instead of passing the path to your app's configuration to buildServer(), you can import the app configuration and disable logging:

// tests/routes.test.js

import serviceConfig from '../platformatic.service.json' assert { type: 'json' }

serviceConfig.server.logger = false

Then pass that serviceConfig configuration object to the buildServer() function:

// tests/routes.test.js

const app = await buildServer(serviceConfig)

Import assertions — the assert { type: 'json' } syntax — are not a stable feature of the JavaScript language, so you'll receive warning messages from Node.js when running your tests. You can disable these warnings by passing the --no-warnings flag to node.

Building apps with Platformatic Service

Because Platformatic Service is built on top of the Fastify framework, you're able to use the full functionality of the Fastify framework in your Platformatic Service app. This includes:

  • Fast, structured logging, provided by Pino
  • Request validation with JSON Schema and Ajv (other validation libraries are supported too)
  • Hooks, which allow fine grained control over when code is run during the request/response lifecycle.
  • Decorators, which allow you to customize core Fastify objects and write more modular code.

Platformatic Service also provides many other features that are built on top of Fastify.

Application features

All Platformatic Service features are fully configurable via platformatic.service.json.

Development features

  • Hot reloading — Your server will automatically reload in development as you develop features.
  • Write your plugins in JavaScript or TypeScript — TypeScript support is provided out-of-the-box and supports hot reloading.
  • Pretty printed logs — Making it easier to understand and debug your application during development.

See the Platformatic Service Configuration documentation for all of the features which can be configured.

Next steps

The documentation for Platformatic Service is a helpful reference when building a Platformatic Service app.

Watch: Understand the parts of a Platformatic app

You want to be confident that you understand how your applications work. In this video you'll learn about the parts that make up a Platformatic application, what each part does, and how they fit together.

Our series of Platformatic How-to videos can help get you up and running building apps with Platformatic open-source tools.

Got questions or need help migrating your Fastify app to use Platformatic Service? Drop by our Discord server and we'll be happy to help you.

+ + + + \ No newline at end of file diff --git a/docs/0.41.1/guides/monitoring/index.html b/docs/0.41.1/guides/monitoring/index.html new file mode 100644 index 00000000000..1eebee2a057 --- /dev/null +++ b/docs/0.41.1/guides/monitoring/index.html @@ -0,0 +1,24 @@ + + + + + +Monitoring with Prometheus and Grafana | Platformatic Open Source Software + + + + + +
+
Version: 0.41.1

Monitoring with Prometheus and Grafana

Prometheus is an open source systems monitoring and alerting toolkit. It's a time series database that collects metrics from configured targets at given intervals, evaluates rule expressions, displays the results, and can trigger alerts if some condition is observed to be true. +Grafana is an open source visualization and analytics software.

It's a pretty common solution to use Prometheus to collect and store monitoring data, and Grafana to visualize it.

Platformatic can be configured to expose Prometheus metrics:

...
"metrics": {
"port": 9091,
"auth": {
"username": "platformatic",
"password": "mysecret"
}
}
...

In this case, we are exposing the metrics on port 9091 (defaults to 9090), and we are using basic authentication to protect the endpoint. +We can also specify the IP address to bind to (defaults to 0.0.0.0). +Note that the metrics port is not the default in this configuration. This is because if you want to test the integration by running both Prometheus and Platformatic on the same host, Prometheus itself starts on port 9090. +All the configuration settings are optional. To use the default settings, set "metrics": true. See the configuration reference for more details.

caution

Use environment variable placeholders in your Platformatic DB configuration file to avoid exposing credentials.

Prometheus Configuration

This is an example of a minimal Prometheus configuration to scrape the metrics from Platformatic:

global:
scrape_interval: 15s
scrape_timeout: 10s
evaluation_interval: 1m
scrape_configs:
- job_name: 'platformatic'
scrape_interval: 2s
metrics_path: /metrics
scheme: http
basic_auth:
username: platformatic
password: mysecret
static_configs:
- targets: ['192.168.69.195:9091']
labels:
group: 'platformatic'

We specify a target configuring the IP address and the port where Platformatic is running, and we specify the username and password to use for basic authentication. The metrics path is the one used by Platformatic. The IP address is not a loopback address so this will work even with Prometheus running in docker on the same host (see below), please change it to your host IP.

To test this configuration, we can run Prometheus locally using docker and docker-compose, so please be sure to have both correctly installed. +Save the above configuration in a file named ./prometheus/prometheus.yml and create a docker-compose.yml:

version: "3.7"

services:
prometheus:
image: prom/prometheus:latest
volumes:
- prometheus_data:/prometheus
- ./prometheus/prometheus.yml:/etc/prometheus/prometheus.yml
command:
- '--config.file=/etc/prometheus/prometheus.yml'
ports:
- '9090:9090'

volumes:
prometheus_data: {}

Then run docker-compose up -d and open http://localhost:9090 in your browser. You should see the Prometheus dashboard, and you can also query the metrics, e.g. {group="platformatic"}. See Prometheus docs for more information on querying and metrics.

Grafana Configuration

Let's see how we can configure Grafana to chart some Platformatic metrics from Prometheus. +Change the docker-compose.yml to add a grafana service:

version: "3.7"
services:

prometheus:
image: prom/prometheus:latest
volumes:
- prometheus_data:/prometheus
- ./prometheus/prometheus.yml:/etc/prometheus/prometheus.yml
command:
- '--config.file=/etc/prometheus/prometheus.yml'
ports:
- '9090:9090'

grafana:
image: grafana/grafana:latest
volumes:
- grafana_data:/var/lib/grafana
environment:
- GF_SECURITY_ADMIN_PASSWORD=pleasechangeme
depends_on:
- prometheus
ports:
- '3000:3000'

volumes:
prometheus_data: {}
grafana_data: {}

In Grafana, select Configuration -> Data Sources -> Add Data Source, and select Prometheus. +In the URL field, specify the URL of the Prometheus server, e.g. http://prometheus:9090 (the name of the service in the docker-compose file), then Save & Test.

Now we can create a dashboard and add panels to it. Select the Prometheus data source, and add queries. You should see the metrics exposed by Platformatic.

It's also possible to import pre-configured dashboards, like this one from Grafana.com.

+ + + + \ No newline at end of file diff --git a/docs/0.41.1/guides/packaging-an-application-as-a-module/index.html b/docs/0.41.1/guides/packaging-an-application-as-a-module/index.html new file mode 100644 index 00000000000..71704566ee6 --- /dev/null +++ b/docs/0.41.1/guides/packaging-an-application-as-a-module/index.html @@ -0,0 +1,27 @@ + + + + + +Packaging a Platformatic Application as a module | Platformatic Open Source Software + + + + + +
+
Version: 0.41.1

Packaging a Platformatic Application as a module

Platformatic Service and Platformatic DB +offer a good starting point to create new applications. However, most developers or organizations might want to +create reusable services or applications built on top of Platformatic. +This is useful to publish the application on the public npm registry (or a private one!), including building your own CLI, +or to create a specialized template for your organization to allow for centralized bugfixes and updates.

This process is the same one we use to maintain Platformatic DB and Platformatic Composer on top of Platformatic Service.

Creating a custom Service

We are creating the module foo.js as follows:

const { schema, platformaticService } = require('@platformatic/service')

/** @type {import('fastify').FastifyPluginAsync<{}>} */
async function foo (app, opts) {
const text = app.platformatic.config.foo.text
app.get('/foo', async (request, reply) => {
return text
})

await platformaticService(app, opts)
}

foo.configType = 'foo'

// break Fastify encapsulation
foo[Symbol.for('skip-override')] = true

// The schema for our configuration file
foo.schema = {
$id: 'https://example.com/schemas/foo.json',
title: 'Foo Service',
type: 'object',
properties: {
server: schema.server,
plugins: schema.plugins,
metrics: schema.metrics,
watch: {
anyOf: [schema.watch, {
type: 'boolean'
}, {
type: 'string'
}]
},
$schema: {
type: 'string'
},
module: {
type: 'string'
},
foo: {
type: 'object',
properties: {
text: {
type: 'string'
}
},
required: ['text']
}
},
additionalProperties: false,
required: ['server']
}

// The configuration for the ConfigManager
foo.configManagerConfig = {
schema: foo.schema,
envWhitelist: ['PORT', 'HOSTNAME'],
allowToWatch: ['.env'],
schemaOptions: {
useDefaults: true,
coerceTypes: true,
allErrors: true,
strict: false
}
}

module.exports = foo

Note that the $id property of the schema identifies the module in our system, +allowing us to retrieve the schema correctly. +It is recommended, but not required, that the JSON schema is actually +published in this location. Doing so allows tooling such as the VSCode +language server to provide autocompletion.

In this example, the schema adds a custom top-level foo property +that users can use to configure this specific module.

ESM is also supported.

Consuming a custom application

Consuming foo.js is simple. We can create a platformatic.json file as follows:

{
"$schema": "https://example.com/schemas/foo.json",
"module": "./foo",
"server": {
"port": 0,
"hostname": "127.0.0.1"
},
"foo": {
"text": "Hello World"
}
}

Note that we must specify both the $schema property and module. +Module can also be any modules published on npm and installed via your package manager.

Building your own CLI

It is possible to build your own CLI with the following cli.mjs file:

import foo from './foo.js'
import { start } from '@platformatic/service'
import { printAndExitLoadConfigError } from '@platformatic/config'

await start(foo, process.argv.splice(2)).catch(printAndExitLoadConfigError)

This will also load platformatic.foo.json files.

+ + + + \ No newline at end of file diff --git a/docs/0.41.1/guides/prisma/index.html b/docs/0.41.1/guides/prisma/index.html new file mode 100644 index 00000000000..f747e85a7bb --- /dev/null +++ b/docs/0.41.1/guides/prisma/index.html @@ -0,0 +1,17 @@ + + + + + +Integrate Prisma with Platformatic DB | Platformatic Open Source Software + + + + + +
+
Version: 0.41.1

Integrate Prisma with Platformatic DB

Prisma is an open-source ORM for Node.js and TypeScript. It is used as an alternative to writing SQL, or using another database access tool such as SQL query builders (like knex.js) or ORMs (like TypeORM and Sequelize). Prisma currently supports PostgreSQL, MySQL, SQL Server, SQLite, MongoDB, and CockroachDB.

Prisma can be used with JavaScript or TypeScript, and provides a level of type-safety that goes beyond the guarantees made by other ORMs in the TypeScript ecosystem. You can find an in-depth comparison of Prisma against other ORMs here.

If you want to get a quick overview of how Prisma works, you can follow the Quickstart or read the Introduction in the Prisma documentation.

How Prisma can improve your workflow with Platformatic DB

While Platformatic speeds up development of your REST and GraphQL APIs, Prisma can complement the workflow in several ways:

  1. Provides an intuitive data modeling language
  2. Provides auto-generated and customizable SQL migrations
  3. Provides type-safety and auto-completion for your database queries

You can learn more about why Prisma and Platformatic are a great match in this article.

Prerequisites

To follow along with this guide, you will need to have the following:

Setup Prisma

Install the Prisma CLI and the db-diff development dependencies in your project:

npm install --save-dev prisma @ruheni/db-diff

Next, initialize Prisma in your project

npx prisma init

This command does the following:

  • Creates a new directory called prisma which contains a file called schema.prisma. This file defines your database connection and the Prisma Client generator.
  • Creates a .env file at the root of your project if it doesn't exist. This defines your environment variables (used for your database connection).

You can specify your preferred database provider using the --datasource-provider flag, followed by the name of the provider:

npx prisma init --datasource-provider postgresql # or sqlite, mysql, sqlserver, cockroachdb

Prisma uses the DATABASE_URL environment variable to connect to your database to sync your database and Prisma schema. It also uses the variable to connect to your database to run your Prisma Client queries.

If you're using PostgreSQL, MySQL, SQL Server, or CockroachDB, ensure that the DATABASE_URL used by Prisma is the same as the one used by Platformatic DB project. If you're using SQLite, refer to the Using Prisma with SQLite section.

If you have an existing project, refer to the Adding Prisma to an existing Platformatic DB project section. If you're adding Prisma to a new project, refer to the Adding Prisma to a new project.

Adding Prisma to an existing project

If you have an existing Platformatic DB project, you can introspect your database and generate the data model in your Prisma schema with the following command:

npx prisma db pull

The command will introspect your database and generate the data model

Next, add the @@ignore attribute to the versions model to exclude it from the Prisma Client API:

model versions {
version BigInt @id
name String?
md5 String?
run_at DateTime? @db.Timestamptz(6)

+ @@ignore
}

To learn how you can evolve your database schema, you can jump to the Evolving your database schema section.

Adding Prisma to a new project

Define a Post model with the following fields at the end of your schema.prisma file:

prisma/schema.prisma
model Post {
id Int @id @default(autoincrement())
title String
content String?
published Boolean @default(false)
viewCount Int @default(0)
createdAt DateTime @default(now())

@@map("posts")
}

The snippet above defines a Post model with the following fields and properties:

  • id: An auto-incrementing integer that will be the primary key for the model.
  • title: A non-nullable String field.
  • content: A nullable String field.
  • published: A Boolean field with a default value of false.
  • viewCount: An Int field with a default value of 0.
  • createdAt: A DateTime field with a timestamp of when the value is created as its default value.

By default, Prisma maps the model name and its format to the table name — which is also used in Prisma Client. Platformatic DB uses a snake casing and pluralized table names to map your table names to the generated API. The @@map() attribute in the Prisma schema allows you to define the name and format of your table names to be used in your database. You can also use the @map() attribute to define the format for field names to be used in your database. Refer to the Foreign keys and table names naming conventions section to learn how you can automate formatting foreign keys and table names.

Next, run the following command to generate an up and down migration:

npx db-diff

The previous command will generate both an up and down migration based on your schema. The generated migration is stored in your ./migrations directory. If you are currently using a different path to store the migration, you can provide the --migrations-dir flag followed by the path.

You can then apply the generated migration using the Platformatic DB CLI:

npx platformatic db migrations apply

Platformatic uses Postgrator to run migrations. Postgrator creates a table in the database called versions to track the applied migrations. Since the versions table is not yet captured in the Prisma schema, run the following command to introspect the database and populate it with the missing model:

npx prisma db pull

Introspecting the database to populate the model prevents including the versions table in the generated down migrations.

Your Prisma schema should now contain a versions model that is similar to this one (it will vary depending on the database system you're using):

model Post {
id Int @id @default(autoincrement())
title String
content String?
published Boolean @default(false)
viewCount Int @default(0)
createdAt DateTime @default(now())

@@map("posts")
}

+model versions {
+ version BigInt @id
+ name String?
+ md5 String?
+ run_at DateTime? @db.Timestamptz(6)
+}

Add the @@ignore attribute function to the model to exclude it from the Prisma Client API:

model versions {
version BigInt @id
name String?
md5 String?
run_at DateTime? @db.Timestamptz(6)

+ @@ignore
}

Evolving your database schema

Update the data model in your Prisma schema by adding a model or a field:

// based on the schema in the "Adding Prisma to a new project" section
+model User {
+ id Int @id @default(autoincrement())
+ email String @unique
+ name String?
+ posts Post[]
+
+ @@map("users")
+}

model Post {
id Int @id @default(autoincrement())
createdAt DateTime @default(now())
title String
content String?
published Boolean @default(false)
viewCount Int @default(0)
+ author User? @relation(fields: [authorId], references: [id])
+ authorId Int? @map("author_id")

@@map("posts")
}

Next, use the @ruheni/db-diff CLI tool to generate up and down migrations:

npx db-diff

This command will generate up and down migrations based off of your Prisma schema. If you are currently using a different path to store the migration, you can provide the --migrations-dir flag followed by the path.

Next, apply the generated migration using the Platformatic CLI:

npx platformatic db migrations apply

And you're done!

Using Prisma Client in your plugins

Plugins allow you to add custom functionality to your REST and GraphQL API. Refer to the Add Custom Functionality guide to learn more about how you can add custom functionality.

danger

Prisma Client usage with Platformatic is currently only supported in Node v18

You can use Prisma Client to interact with your database in your plugin.

To get started, run the following command:

npx prisma generate

The above command installs the @prisma/client in your project and generates a Prisma Client based off of your Prisma schema.

Install @sabinthedev/fastify-prisma fastify plugin. The plugin takes care of shutting down database connections and makes Prisma Client available as a Fastify plugin.

npm install @sabinthedev/fastify-prisma

Register the plugin and extend your REST API:

// 1.
const prismaPlugin = require("@sabinthedev/fastify-prisma")

module.exports = async (app) => {
app.log.info('plugin loaded')

// 2.
app.register(prismaPlugin)

/**
* Plugin logic
*/
// 3.
app.put('/post/:id/views', async (req, reply) => {

const { id } = req.params

// 4.
const post = await app.prisma.post.update({
where: {
id: Number(id)
},
data: {
viewCount: {
increment: 1
}
}
})

// 5.
return reply.send(post)
})
}

The snippet does the following:

  1. Imports the plugin
  2. Registers the @sabinthedev/fastify-prisma
  3. Defines the endpoint for incrementing the views of a post
  4. Makes a query to the database on the Post model to increment a post's view count
  5. Returns the updated post on success

If you would like to extend your GraphQL API, extend the schema and define the corresponding resolver:

plugin.js
// ./plugin.js
const prismaPlugin = require("@sabinthedev/fastify-prisma")

module.exports = async (app) => {
app.log.info('plugin loaded')

app.graphql.extendSchema(`
extend type Mutation {
incrementPostViewCount(id: ID): Post
}
`)

app.graphql.defineResolvers({
Mutation: {
incrementPostViewCount: async (_, { id }) => {
const post = await prisma.post.update({
where: {
id: Number(id)
},
data: {
viewCount: {
increment: 1
}
}
})

if (!post) throw new Error(`Post with id:${id} was not found`)
return post
}
}
})
}

Start the server:

npx platformatic db start

The query should now be included in your GraphQL schema.

You can also use the Prisma Client in your REST API endpoints.

Workarounds

Using Prisma with SQLite

Currently, Prisma doesn't resolve the file path of a SQLite database the same way as Platformatic does.

If your database is at the root of the project, create a new environment variable that Prisma will use called PRISMA_DATABASE_URL:

# .env
DATABASE_URL="sqlite://db.sqlite"
PRISMA_DATABASE_URL="file:../db.sqlite"

Next, update the url value in the datasource block in your Prisma schema with the updated value:

prisma/schema.prisma
// ./prisma/schema.prisma
datasource db {
provider = "sqlite"
url = env("PRISMA_DATABASE_URL")
}

Running migrations should now work smoothly and the path will be resolved correctly.

Foreign keys, field, and table names naming conventions

Foreign key names should use underscores, e.g. author_id, for Platformatic DB to correctly map relations. You can use the @map("") attribute to define the names of your foreign keys and field names to be defined in the database.

Table names should be mapped to use the naming convention expected by Platformatic DB e.g. @@map("recipes") (the Prisma convention is Recipe, which corresponds with the model name).

You can use prisma-case-format to enforce your own database conventions, i.e., pascal, camel, and snake casing.

Learn more

If you would like to learn more about Prisma, be sure to check out the Prisma docs.

+ + + + \ No newline at end of file diff --git a/docs/0.41.1/guides/securing-platformatic-db/index.html b/docs/0.41.1/guides/securing-platformatic-db/index.html new file mode 100644 index 00000000000..5ea385b2023 --- /dev/null +++ b/docs/0.41.1/guides/securing-platformatic-db/index.html @@ -0,0 +1,31 @@ + + + + + +Securing Platformatic DB with Authorization | Platformatic Open Source Software + + + + + +
+
Version: 0.41.1

Securing Platformatic DB with Authorization

Introduction

Authorization in Platformatic DB is role-based. User authentication and the +assignment of roles must be handled by an external authentication service. +Take a look at the reference documentation for Authorization.

The goal of this simple guide is to protect an API built with Platformatic DB +with the use of a shared secret, that we call adminSecret. We want to prevent +any user that is not an admin to access the data.

The use of an adminSecret is a simplistic way of securing a system. +It is a crude way for limiting access and not suitable for production systems, +as the risk of leaking the secret is high in case of a security breach. +A production friendly way would be to issue a machine-to-machine JSON Web Token, +ideally with an asymmetric key. Alternatively, you can defer to an external +service via a Web Hook.

Please refer to our guide to set up Auth0 for more information +on JSON Web Tokens.

Block access to all entities, allow admins

The following configuration will block all anonymous users (e.g. each user without a known role) +to access every entity:

{
...
"authorization": {
"adminSecret": "replaceWithSomethingRandomAndSecure"
}
}

The data will still be available if the X-PLATFORMATIC-ADMIN-SECRET HTTP header +is specified when making HTTP calls, like so:

curl -H 'X-PLATFORMATIC-ADMIN-SECRET: replaceWithSomethingRandomAndSecure' http://127.0.0.1:3042/pages
info

Configuring JWT or Web Hooks will have the same result of configuring an admin secret.

Authorization rules

Rules can be provided based on entity and role in order to restrict access and provide fine grained access. +To make an admin only query and save the page table / page entity using adminSecret this structure should be used in the platformatic.db configuration file:

  ...
"authorization": {
"adminSecret": "easy",
"rules": [{
"entity": "movie"
"role": "platformatic-admin",
"find": true,
"save": true,
"delete": false,
}
]
}
info

Note that the role of an admin user from adminSecret strategy is platformatic-admin by default.

Read-only access to anonymous users

The following configuration will allow all anonymous users (e.g. each user without a known role) +to access the pages table / page entity in Read-only mode:

{
...
"authorization": {
"adminSecret": "replaceWithSomethingRandomAndSecure"
"rules": [{
"role": "anonymous",
"entity": "page",
"find": true,
"save": false,
"delete": false
}]
}
}

Note that we set find as true to allow the access, while the other options are false.

Work in Progress

This guide is a Work-In-Progress. Let us know what other common authorization use cases we should cover.

+ + + + \ No newline at end of file diff --git a/docs/0.41.1/guides/seed-a-database/index.html b/docs/0.41.1/guides/seed-a-database/index.html new file mode 100644 index 00000000000..7ae72effcbb --- /dev/null +++ b/docs/0.41.1/guides/seed-a-database/index.html @@ -0,0 +1,21 @@ + + + + + +Seed a Database | Platformatic Open Source Software + + + + + +
+
Version: 0.41.1

Seed a Database

A database is as useful as the data that it contains: a fresh, empty database +isn't always the best starting point. We can add a few rows from our migrations +using SQL, but we might need to use JavaScript from time to time.

The platformatic db seed command allows us to run a +script that will populate — or "seed" — our database.

Example

Our seed script should export a Function that accepts an argument: +an instance of @platformatic/sql-mapper.

seed.js
'use strict'

module.exports = async function ({ entities, db, sql }) {
await entities.graph.save({ input: { name: 'Hello' } })
await db.query(sql`
INSERT INTO graphs (name) VALUES ('Hello 2');
`)
}

We can then run the seed script with the Platformatic CLI:

npx platformatic db seed seed.js
+ + + + \ No newline at end of file diff --git a/docs/0.41.1/guides/telemetry/index.html b/docs/0.41.1/guides/telemetry/index.html new file mode 100644 index 00000000000..f589f95a2f1 --- /dev/null +++ b/docs/0.41.1/guides/telemetry/index.html @@ -0,0 +1,21 @@ + + + + + +Telemetry with Jaeger | Platformatic Open Source Software + + + + + +
+
Version: 0.41.1

Telemetry with Jaeger

Introduction

Platformatic supports Open Telemetry integration. This allows you to send telemetry data to one of the OTLP compatible servers (see here) or to a Zipkin server. Let's show this with Jaeger.

Jaeger setup

The quickest way is to use docker:

docker run -d --name jaeger \
-e COLLECTOR_OTLP_ENABLED=true \
-p 16686:16686 \
-p 4317:4317 \
-p 4318:4318 \
jaegertracing/all-in-one:latest

Check that the server is running by opening http://localhost:16686/ in your browser.

Platformatic setup

We will test this with a Platformatic Composer that proxies requests to a Platformatic Service, which in turn invokes a Platformatic DB Service. +In this way we show that the telemetry is propagated from the Composer throughout the services and collected correctly. +Let's set up all these components:

Platformatic DB Service

Create a folder for DB and cd into it:

mkdir test-db
cd test-db

Then create a db in the folder using npx create-platformatic@latest:

npx create-platformatic@latest

To make it simple, use sqlite and create/apply the default migrations. This DB Service is exposed on port 5042:


➜ npx create-platformatic@latest

Hello user, welcome to Platformatic 0.32.0!
Let's start by creating a new project.
? Which kind of project do you want to create? DB
? Where would you like to create your project? .
? What database do you want to use? SQLite
? Do you want to use the connection string "sqlite://./db.sqlite"? Confirm
? Do you want to create default migrations? yes
? Do you want to create a plugin? no
? Do you want to use TypeScript? no
? What port do you want to use? 5042
[15:40:46] INFO: Configuration file platformatic.db.json successfully created.
[15:40:46] INFO: Environment file .env successfully created.
[15:40:46] INFO: Migrations folder migrations successfully created.
[15:40:46] INFO: Migration file 001.do.sql successfully created.
[15:40:46] INFO: Migration file 001.undo.sql successfully created.
[15:40:46] INFO: Plugin file created at plugin.js
? Do you want to run npm install? no
? Do you want to apply migrations? yes
...done!
? Do you want to generate types? no
? Do you want to create the github action to deploy this application to Platformatic Cloud dynamic workspace? no
? Do you want to create the github action to deploy this application to Platformatic Cloud static workspace? no

All done! Please open the project directory and check the README.
Will test this in one example with a Platformatic Composer that proxy requests to a Platformatic Service, which in turn invokes a Platformatic DB.

Open the platformatic.db.json file and add the telemetry configuration:

  "telemetry": {
"serviceName": "test-db",
"exporter": {
"type": "otlp",
"options": {
"url": "http://localhost:4318/v1/traces"
}
}
}

Finally, start the DB service:

npx platformatic db start

Platformatic Service

Create at the same level of test-db another folder for Service and cd into it:

mkdir test-service
cd test-service

Then create a service on the 5043 port in the folder using npx create-platformatic@latest:

➜ npx create-platformatic@latest

Hello user, welcome to Platformatic 0.32.0!
Let's start by creating a new project.
? Which kind of project do you want to create? Service
? Where would you like to create your project? .
? Do you want to run npm install? no
? Do you want to use TypeScript? no
? What port do you want to use? 5043
[15:55:35] INFO: Configuration file platformatic.service.json successfully created.
[15:55:35] INFO: Environment file .env successfully created.
[15:55:35] INFO: Plugins folder "plugins" successfully created.
[15:55:35] INFO: Routes folder "routes" successfully created.
? Do you want to create the github action to deploy this application to Platformatic Cloud dynamic workspace? no
? Do you want to create the github action to deploy this application to Platformatic Cloud static workspace? no

Open the platformatic.service.json file and add the following telemetry configuration (it's exactly the same as DB, but with a different serviceName)

  "telemetry": {
"serviceName": "test-service",
"exporter": {
"type": "otlp",
"options": {
"url": "http://localhost:4318/v1/traces"
}
}
}

We want this service to invoke the DB service, so we need to add a client for test-db to it:

npx platformatic client http://127.0.0.1:5042 js --name movies

Check platformatic.service.json to see that the client has been added (PLT_MOVIES_URL is defined in .env):

    "clients": [
{
"schema": "movies/movies.openapi.json",
"name": "movies",
"type": "openapi",
"url": "{PLT_MOVIES_URL}"
}
]

Now open routes/root.js and add the following:

  fastify.get('/movies-length', async (request, reply) => {
const movies = await request.movies.getMovies()
return { length: movies.length }
})

This code calls movies to get all the movies and returns the length of the array.

Finally, start the service:

npx platformatic service start

Platformatic Composer

Create at the same level of test-db and test-service another folder for Composer and cd into it:

mkdir test-composer
cd test-composer

Then create a composer on the 5044 port in the folder using npx create-platformatic@latest:

➜ npx create-platformatic@latest

Hello marcopiraccini, welcome to Platformatic 0.32.0!
Let's start by creating a new project.
? Which kind of project do you want to create? Composer
? Where would you like to create your project? .
? What port do you want to use? 5044
? Do you want to run npm install? no
[16:05:28] INFO: Configuration file platformatic.composer.json successfully created.
[16:05:28] INFO: Environment file .env successfully created.
? Do you want to create the github action to deploy this application to Platformatic Cloud dynamic workspace? no
? Do you want to create the github action to deploy this application to Platformatic Cloud static workspace? no

All done! Please open the project directory and check the README.

Open platformatic.composer.js and change it to the following:

{
"$schema": "https://platformatic.dev/schemas/v0.32.0/composer",
"server": {
"hostname": "{PLT_SERVER_HOSTNAME}",
"port": "{PORT}",
"logger": {
"level": "{PLT_SERVER_LOGGER_LEVEL}"
}
},
"composer": {
"services": [
{
"id": "example",
"origin": "http://127.0.0.1:5043",
"openapi": {
"url": "/documentation/json"
}
}
],
"refreshTimeout": 3000
},
"telemetry": {
"serviceName": "test-composer",
"exporter": {
"type": "otlp",
"options": {
"url": "http://localhost:4318/v1/traces"
}
}
},
"watch": true
}

Note that we just added test-service as origin of the proxied service and added the usual telemetry configuration, with a different serviceName.

Finally, start the composer:

npx platformatic composer start

Run the Test

Check that the composer is exposing movies-length opening: http://127.0.0.1:5044/documentation/

You should see: +image

To add some data, we can POST directly to the DB service (port 5042):

curl -X POST -H "Content-Type: application/json" -d '{"title":"The Matrix"}' http://127.0.0.1:5042/movies 
curl -X POST -H "Content-Type: application/json" -d '{"title":"The Matrix Reloaded"}' http://127.0.0.1:5042/movies

Now, let's check that the composer (port 5044) is working:

curl http://127.0.0.1:5044/movies-length

If the composer is working correctly, you should see:

{"length":2}

However, the main interest of this example is to show how to use the Platformatic Telemetry, so let's check it. +Open the Jaeger UI at http://localhost:16686/ and you should see something like this:

image

Select on the left the test-composer service and the GET /movies-length operation, click on "Find traces" and you should see something like this:

image

You can then click on the trace and see the details:

image

Note that every time a request is received or a client call is made, a new span is started. So we have:

  • One span for the request received by the test-composer
  • One span for the client call to test-service
  • One span for the request received by test-service
  • One span for the client call to test-db
  • One span for the request received by test-db

All these spans are linked together, so you can see the whole trace.

What if you want to use Zipkin?

Starting from this example, it's also possible to run the same test using Zipkin. To do so, you need to start the Zipkin server:

docker run -d -p 9411:9411 openzipkin/zipkin

Then, you need to change the telemetry configuration in all the platformatic.*.json to the following (only the exporter object is different)

  "telemetry": {
(...)
"exporter": {
"type": "zipkin",
"options": {
"url": "http://127.0.0.1:9411/api/v2/spans"
}
}
}

The zipkin ui is available at http://localhost:9411/

+ + + + \ No newline at end of file diff --git a/docs/0.41.1/platformatic-cloud/deploy-database-neon/index.html b/docs/0.41.1/platformatic-cloud/deploy-database-neon/index.html new file mode 100644 index 00000000000..3cbf4397111 --- /dev/null +++ b/docs/0.41.1/platformatic-cloud/deploy-database-neon/index.html @@ -0,0 +1,32 @@ + + + + + +Deploy a PostgreSQL database with Neon | Platformatic Open Source Software + + + + + +
+
Version: 0.41.1

Deploy a PostgreSQL database with Neon

Neon offers multi-cloud fully managed +Postgres with a generous free tier. They separated storage and +compute to offer autoscaling, branching, and bottomless storage. +It offers a great environment for creating database preview +environments for your Platformatic DB +applications.

This guide shows you how to integrate Neon branch deployments with your +Platformatic app's GitHub Actions workflows. It assumes you have already +followed the Quick Start Guide.

Create a project on Neon

To set up an account with Neon, open their website, sign up and create a +new project.

Take note of the following configuration setting values:

  • The connection string for your main branch database, to be stored in a NEON_DB_URL_PRODUCTION secret
  • The Project ID (available under the project Settings), to be stored in a NEON_PROJECT_ID secret
  • Your API key (available by clicking on your user icon > Account > Developer settings), to be stored under NEON_API_KEY

You can learn more about Neon API keys in their Manage API Keys documentation.

Configure Github Environments and Secrets

Now you need to set the configuration values listed above as +repository secrets +on your project's GitHub repository. +Learn how to use environments for deployment in GitHub's documentation.

Configure the GitHub Environments for your repository to have:

  • production secrets, available only to the main branch:
    • NEON_DB_URL_PRODUCTION
  • previews secrets available to all branches:
    • NEON_PROJECT_ID
    • NEON_API_KEY

Configure the main branch workflow

Replace the contents of your app's workflow for static workspace deployment:

.github/workflows/platformatic-static-workspace-deploy.yml
name: Deploy Platformatic application to the cloud
on:
push:
branches:
- main
paths-ignore:
- 'docs/**'
- '**.md'

jobs:
build_and_deploy:
environment:
name: production
permissions:
contents: read
runs-on: ubuntu-latest
steps:
- name: Checkout application project repository
uses: actions/checkout@v4
- name: npm install --omit=dev
run: npm install --omit=dev
- name: Deploy project
uses: platformatic/onestep@latest
with:
github_token: ${{ secrets.GITHUB_TOKEN }}
platformatic_workspace_id: <YOUR_STATIC_WORKSPACE_ID>
platformatic_workspace_key: ${{ secrets.PLATFORMATIC_STATIC_WORKSPACE_API_KEY }}
platformatic_config_path: ./platformatic.db.json
secrets: DATABASE_URL
env:
DATABASE_URL: ${{ secrets.NEON_DB_URL_PRODUCTION }}
PLT_SERVER_LOGGER_LEVEL: info
PORT: 3042
PLT_SERVER_HOSTNAME: 127.0.0.1

Replace <YOUR_STATIC_WORKSPACE_ID> with the workspace ID that you previously had in this file.

When your app is deployed to the static workspace it will now be configured to connect to the +main branch database for your Neon project.

Configure the preview environment workflow

Neon allows up to 10 database branches on their free tier. You can automatically create a new +database branch when a pull request is opened, and then automatically remove it when the pull +request is merged.

GitHub Action to create a preview environment

Replace the contents of your app's workflow for dynamic workspace deployment:

.github/workflows/platformatic-dynamic-workspace-deploy.yml
name: Deploy to Platformatic cloud
on:
pull_request:
paths-ignore:
- 'docs/**'
- '**.md'

# This allows a subsequently queued workflow run to interrupt previous runs
concurrency:
group: "${{ github.workflow }} @ ${{ github.event.pull_request.head.label || github.head_ref || github.ref }}"
cancel-in-progress: true

jobs:
build_and_deploy:
runs-on: ubuntu-latest
environment:
name: development
steps:
- name: Checkout application project repository
uses: actions/checkout@v4
- name: npm install --omit=dev
run: npm install --omit=dev
- name: Get PR number
id: get_pull_number
run: |
pull_number=$(jq --raw-output .pull_request.number "$GITHUB_EVENT_PATH")
echo "pull_number=${pull_number}" >> $GITHUB_OUTPUT
echo $pull_number
- uses: neondatabase/create-branch-action@v4
with:
project_id: ${{ secrets.NEON_PROJECT_ID }}
branch_name: pr-${{ steps.get_pull_number.outputs.pull_number }}
api_key: ${{ secrets.NEON_API_KEY }}
id: create-branch
- name: Deploy project
uses: platformatic/onestep@latest
with:
github_token: ${{ secrets.GITHUB_TOKEN }}
platformatic_workspace_id: ${{ secrets.PLATFORMATIC_DYNAMIC_WORKSPACE_ID }}
platformatic_workspace_key: ${{ secrets.PLATFORMATIC_DYNAMIC_WORKSPACE_KEY }}
platformatic_config_path: ./platformatic.db.json
env:
DATABASE_URL: ${{ steps.create-branch.outputs.db_url }}
PLT_SERVER_LOGGER_LEVEL: info
PORT: 3042
PLT_SERVER_HOSTNAME: 127.0.0.1

Replace <YOUR_DYNAMIC_WORKSPACE_ID> with the workspace ID that you previously had in this file.

Configure preview environment cleanup

After a pull request to the main branch is merged, you should remove the matching database branch.

Create a new file, .github/workflows/cleanup-neon-branch-db.yml, and copy and paste in the following +workflow configuration:

.github/workflows/cleanup-neon-branch-db.yml
name: Cleanup Neon Database Branch
on:
push:
branches:
- 'main'
jobs:
delete-branch:
environment:
name: development
permissions: write-all
runs-on: ubuntu-latest
steps:
- name: Get PR info
id: get-pr-info
uses: actions-ecosystem/action-get-merged-pull-request@v1.0.1
with:
github_token: ${{secrets.GITHUB_TOKEN}}
- run: |
echo ${{ steps.get-pr-info.outputs.number}}
- name: Delete Neon Branch
if: ${{ steps.get-pr-info.outputs.number }}
uses: neondatabase/delete-branch-action@v3
with:
project_id: ${{ secrets.NEON_PROJECT_ID }}
branch: pr-${{ steps.get-pr-info.outputs.number }}
api_key: ${{ secrets.NEON_API_KEY }}

Deployment

To deploy these changes to your app:

  1. Create a Git branch locally (git checkout -b <BRANCH_NAME>)
  2. Commit your changes and push them to GitHub
  3. Open a pull request on GitHub - a branch will automatically be created for your Neon database and a preview app will be deployed to Platformatic Cloud (in your app's dynamic workspace).
  4. Merge the pull request - the Neon database branch will be automatically deleted and your app will be deployed to Platformatic Cloud (in your app's static workspace).
+ + + + \ No newline at end of file diff --git a/docs/0.41.1/platformatic-cloud/pricing/index.html b/docs/0.41.1/platformatic-cloud/pricing/index.html new file mode 100644 index 00000000000..405030aef58 --- /dev/null +++ b/docs/0.41.1/platformatic-cloud/pricing/index.html @@ -0,0 +1,23 @@ + + + + + +Platformatic Cloud Pricing | Platformatic Open Source Software + + + + + +
+
Version: 0.41.1

Platformatic Cloud Pricing

Find the plan that works best for you!

FreeBasicAdvancedPro
Pricing$0$4.99$22.45$49.99
Slots01512
CNAME-truetruetrue
Always On-truetruetrue

FAQ

What is a slot?

One slot is equal to one compute unit. The free plan has no always-on +machines and they will be stopped while not in use.

What is a workspace?

A workspace is the security boundary of your deployment. You will use +the same credentials to deploy to one.

A workspace can be either static or dynamic. +A static workspace always deploys to the same domain, while +in a dynamic workspace each deployment will have its own domain. +The latter are useful to provide for pull request previews.

Can I change or upgrade my plan after I start using Platformatic?

Plans can be changed or upgraded at any time

What does it mean I can set my own CNAME?

Free applications only get a *.deploy.space domain name to access +their application. All other plans can set it to a domain of their choosing.

+ + + + \ No newline at end of file diff --git a/docs/0.41.1/platformatic-cloud/quick-start-guide/index.html b/docs/0.41.1/platformatic-cloud/quick-start-guide/index.html new file mode 100644 index 00000000000..6126c75e91b --- /dev/null +++ b/docs/0.41.1/platformatic-cloud/quick-start-guide/index.html @@ -0,0 +1,45 @@ + + + + + +Cloud Quick Start Guide | Platformatic Open Source Software + + + + + +
+
Version: 0.41.1

Cloud Quick Start Guide

This guide shows you how to create and deploy an application to +Platformatic Cloud.

Prerequisites

To follow along with this guide you'll need to have these things installed:

You will also need to have a GitHub account.

Log in to Platformatic Cloud

Go to the Platformatic Cloud website and click on the +Continue with GitHub button. You'll be transferred to a GitHub page that +asks you to Authorize Platformatic Cloud. To continue, click on the +Authorize platformatic button.

Screenshot of Continue with GitHub button

On the Platformatic Cloud Service Agreements page, check the boxes and +click the Continue button. You'll then be redirected to your Cloud Dashboard page.

Create a Cloud app

Screenshot of an empty Apps page

Click the Create an app now button on your Cloud Dashboard page.

Enter quick-start-app as your application name. Click the Create Application button.

Create a static app workspace

Enter production as the name for your workspace. Then click on the Create Workspace button.

On the next page you'll see the Workspace ID and API key for your app workspace. +Copy them and store them somewhere secure for future reference, for example in a password manager app. +The API key will be used to deploy your app to the workspace that you've just created.

Click on the Back to dashboard button.

Create a dynamic app workspace

On your Cloud Dashboard, click on your app, then click on Create Workspace in the Workspaces +sidebar.

Screenshot of the create app workspace screen

The Dynamic Workspace option will be automatically enabled as you have already created a +static workspace. Dynamic workspaces can be used to deploy preview applications for GitHub +pull requests.

Enter development as the name for your workspace, then click on the Create Workspace button. +Copy the Workspace ID and API key and store them somewhere secure.

Create a GitHub repository

Go to the Create a new repository page on GitHub. +Enter quick-start-app as the Repository name for your new repository. +Click on the Add a README file checkbox and click the Create repository +button.

Add the workspace API keys as repository secrets

Go to the Settings tab on your app's GitHub repository. Click into the +Secrets and variables > Actions section and add the following secrets:

NameSecret
PLATFORMATIC_STATIC_WORKSPACE_IDYour app's static workspace ID
PLATFORMATIC_STATIC_WORKSPACE_API_KEYYour app's static workspace API key
PLATFORMATIC_DYNAMIC_WORKSPACE_IDYour app's dynamic workspace ID
PLATFORMATIC_DYNAMIC_WORKSPACE_API_KEYYour app's dynamic workspace API key

Click on the New repository secret button to add a secret.

tip

You can also use the GitHub CLI to set secrets on your GitHub repository, for example:

gh secret set \
--app actions \
--env-file <FILENAME_OF_ENV_FILE_WITH_SECRETS> \
--repos <YOUR_GITHUB_USERNAME>/<REPO_NAME>

Create a new Platformatic app

In your terminal, use Git to clone your repository from GitHub. For example:

git clone git@github.com:username/quick-start-app.git
tip

See the GitHub documentation for help with +Cloning a repository.

Now change into the project directory:

cd quick-start-app

Now run this command to start the Platformatic creator wizard:

npm create platformatic@latest

This interactive command-line tool will ask you some questions about how you'd +like to set up your new Platformatic app. For this guide, select these options:

- Which kind of project do you want to create?     => DB
- Where would you like to create your project? => .
- Do you want to create default migrations? => yes
- Do you want to create a plugin? => yes
- Do you want to use TypeScript? => no
- Do you want to overwrite the existing README.md? => yes
- Do you want to run npm install? => yes (this can take a while)
- Do you want to apply the migrations? => yes
- Do you want to generate types? => yes
- Do you want to create the github action to deploy this application to Platformatic Cloud dynamic workspace? => yes
- Do you want to create the github action to deploy this application to Platformatic Cloud static workspace? => yes

Copy and paste your dynamic and static workspace IDs when prompted by the creator wizard.

Once the wizard is complete, you'll have a Platformatic app project in the +quick-start-app directory, with example migration files and a plugin script.

Deploy the app

In your project directory, commit your application with Git:

git add .

git commit -m "Add Platformatic app"

Now push your changes up to GitHub:

git push origin main

On the GitHub repository page in your browser click on the Actions tab. +You should now see the Platformatic Cloud deployment workflow running.

Test the deployed app

Screenshot of a static app workspace that has had an app deployed to it

Once the GitHub Actions deployment workflow has completed, go to the production workspace +for your app in Platformatic Cloud. Click on the link for the Entry Point. You should now +see the Platformatic DB app home page.

Click on the OpenAPI Documentation link to try out your app's REST API using the Swagger UI.

Screenshot of Swagger UI for a Platformatic DB app

Preview pull request changes

When a pull request is opened on your project's GitHub repository, a preview app will automatically +be deployed to your app's dynamic workspace.

To see a preview app in action, create a new Git branch:

git checkout -b add-hello-endpoint

Then open up your app's plugin.js file in your code editor. Add the following code inside +the existing empty function:

app.get('/hello', async function(request, reply) {
return { hello: 'from Platformatic Cloud' }
})

Save the changes, then commit and push them up to GitHub:

git add plugin.js

git commit -m "Add hello endpoint"

git push -u origin add-hello-endpoint

Now create a pull request for your changes on GitHub. At the bottom of the +pull request page you'll see that a deployment has been triggered to your +app's dynamic workspace.

Screenshot of checks on a GitHub pull request

Once the deployment has completed, a comment will appear on your pull request +with a link to the preview app.

Screenshot of a deployed preview app comment on a GitHub pull request

Click on the Application URL link. If you add /hello on to the URL, +you should receive a response from the endpoint that you just added to +your application.

Screenshot of a JSON response from an API endpoint

+ + + + \ No newline at end of file diff --git a/docs/0.41.1/reference/cli/index.html b/docs/0.41.1/reference/cli/index.html new file mode 100644 index 00000000000..3982c321ba9 --- /dev/null +++ b/docs/0.41.1/reference/cli/index.html @@ -0,0 +1,44 @@ + + + + + +Platformatic CLI | Platformatic Open Source Software + + + + + +
+
Version: 0.41.1

Platformatic CLI

Installation and usage

Install the Platformatic CLI as a dependency for your project:

npm install platformatic

Once it's installed you can run it with:

npx platformatic
info

The platformatic package can be installed globally, but installing it as a +project dependency ensures that everyone working on the project is using the +same version of the Platformatic CLI.

Commands

The Platformatic CLI provides the following commands:

help

Welcome to Platformatic. Available commands are:

  • help - display this message.
  • help <command> - show more information about a command.
  • db - start Platformatic DB; type platformatic db help to know more.
  • service - start Platformatic Service; type platformatic service help to know more.
  • upgrade - upgrade the Platformatic configuration to the latest version.
  • gh - create a new gh action for Platformatic deployments.
  • deploy - deploy a Platformatic application to the cloud.
  • runtime - start Platformatic Runtime; type platformatic runtime help to know more.
  • start - start a Platformatic application.
  • frontend - create frontend code to consume the REST APIs.

compile

Compile all typescript plugins.

  $ platformatic compile

This command will compile the TypeScript plugins for each platformatic application.

deploy

Deploys an application to the Platformatic Cloud.

 $ platformatic deploy

Options:

  • -t, --type static/dynamic - The type of the workspace.
  • -c, --config FILE - Specify a configuration file to use.
  • -k, --keys FILE - Specify a path to the workspace keys file.
  • -l --label TEXT - The deploy label. Only for dynamic workspaces.
  • -e --env FILE - The environment file to use. Default: ".env"
  • -s --secrets FILE - The secrets file to use. Default: ".secrets.env"
  • --workspace-id uuid - The workspace id where the application will be deployed.
  • --workspace-key TEXT - The workspace key where the application will be deployed.
  1. To deploy a Platformatic application to the cloud, you should go to the Platformatic cloud dashboard and create a workspace.
  2. Once you have created a workspace, retrieve your workspace id and key from the workspace settings page. Optionally, you can download the provided workspace env file, which you can use with the --keys option.

ℹ️

When deploying an application to a dynamic workspace, specify the deploy --label option. You can find it on your cloud dashboard or you can specify a new one.

gh

Creates a gh action to deploy platformatic services on workspaces.

 $ platformatic gh -t dynamic

Options:

  • -w --workspace ID - The workspace ID where the service will be deployed.
  • -t, --type static/dynamic - The type of the workspace. Defaults to static.
  • -c, --config FILE - Specify a configuration file to use.
  • -b, --build - Build the service before deploying (npm run build).

If not specified, the configuration will be loaded from any of the following, in the current directory.

  • platformatic.db.json, or
  • platformatic.db.yml, or
  • platformatic.db.tml, or
  • platformatic.service.json, or
  • platformatic.service.yml, or
  • platformatic.service.tml

You can find more details about the configuration format here:

start

Start a Platformatic application with the following command:

$ platformatic start

Options:

  • -c, --config <path> - Path to the configuration file.
  • --inspect[=[host:]port] - Start the Node.js debugger. host defaults to '127.0.0.1'. port defaults to 9229. Use caution when binding to a public host:port combination.
  • --inspect-brk[=[host:]port] - Start the Node.js debugger and block until a client has attached. host defaults to '127.0.0.1'. port defaults to 9229. Use caution when binding to a public host:port combination.

upgrade

Upgrade the Platformatic schema configuration to the latest version.

 $ platformatic upgrade

Options:

  • -c, --config FILE - Specify a schema configuration file to use.

If not specified, the configuration will be loaded from any of the following, in the current directory.

  • platformatic.db.json, or
  • platformatic.db.yml, or
  • platformatic.db.tml, or
  • platformatic.service.json, or
  • platformatic.service.yml, or
  • platformatic.service.tml

You can find more details about the configuration format here:

client

platformatic client <command>

help

Create a Fastify plugin that exposes a client for a remote OpenAPI or GraphQL API.

To create a client for a remote OpenAPI API, you can use the following command:

$ platformatic client http://example.com/to/schema/file -n myclient

To create a client for a remote Graphql API, you can use the following command:

$ platformatic client http://example.com/graphql -n myclient

Instead of a URL, you can also use a local file:

$ platformatic client path/to/schema -n myclient

This will create a Fastify plugin that exposes a client for the remote API in a folder myclient +and a file named myclient.js inside it.

If platformatic config file is specified, it will be edited and a clients section will be added. +Then, in any part of your Platformatic application you can use the client.

You can use the client in your application in Javascript, calling a GraphQL endpoint:

module.exports = async function (app, opts) {
app.post('/', async (request, reply) => {
const res = await app.myclient.graphql({
query: 'query { hello }'
})
return res
})
}

or in Typescript, calling an OpenAPI endpoint:

import { FastifyInstance } from 'fastify'
/// <reference path="./myclient" />

export default async function (app: FastifyInstance) {
app.get('/', async () => {
return app.myclient.get({})
})
}

Options:

  • -c, --config <path> - Path to the configuration file.
  • -n, --name <name> - Name of the client.
  • -f, --folder <name> - Name of the plugin folder, defaults to --name value.
  • -t, --typescript - Generate the client plugin in TypeScript.
  • --full-response - Client will return full response object rather than just the body.
  • --full-request - Client will be called with all parameters wrapped in body, headers and query properties.
  • --full - Enables both --full-request and --full-response overriding them.
  • --optional-headers <headers> - Comma separated string of headers that will be marked as optional in the type file

composer

platformatic composer <command>

help

Available commands:

  • help - show this help message.
  • help <command> - shows more information about a command.
  • start - start the server.
  • openapi schemas fetch - fetch OpenAPI schemas from services.

openapi schemas fetch

Fetch OpenAPI schemas from remote services to use in your Platformatic project.

  $ platformatic composer openapi schemas fetch

It will fetch all the schemas from the remote services and store them by path +set in the platformatic.composer.json file. If the path is not set, it will +skip fetching the schema.

start

Start the Platformatic Composer server with the following command:

 $ platformatic composer start

You will need a configuration file. Here is an example to get you started, +save the following as platformatic.composer.json:

  {
"server": {
"hostname": "127.0.0.1",
"port": 0,
"logger": {
"level": "info"
}
},
"composer": {
"services": [
{
"id": "service1",
"origin": "http://127.0.0.1:3051",
"openapi": {
"url": "/documentation/json"
}
},
{
"id": "service2",
"origin": "http://127.0.0.1:3052",
"openapi": {
"file": "./schemas/service2.openapi.json"
}
}
],
"refreshTimeout": 1000
}
}

By sending the SIGUSR2 signal, the server can be reloaded.

Options:

  • -c, --config FILE - Specify a configuration file to use.

If not specified, the configuration will be loaded from any of the following, in the current directory.

  • platformatic.composer.json, or
  • platformatic.composer.yml, or
  • platformatic.composer.tml

You can find more details about the configuration format here:

db

platformatic db <command>

compile

Compile typescript plugins.

  $ platformatic db compile

As a result of executing this command, the Platformatic DB will compile typescript +plugins in the outDir directory.

If not specified, the configuration will be loaded from any of the following, in the current directory.

  • platformatic.db.json, or
  • platformatic.db.yml, or
  • platformatic.db.tml

You can find more details about the configuration format here:

help

Available commands:

  • help - show this help message.
  • help <command> - shows more information about a command.
  • start - start the server.
  • compile - compile typescript plugins.
  • seed - run a seed file.
  • types - generate typescript types for entities.
  • schema - generate and print api schema.
  • migrations create - generate do and undo migration files.
  • migrations apply - apply migration files.

migrations apply

Apply all configured migrations to the database:

  $ platformatic db migrations apply

The migrations will be applied in the order they are specified in the +folder defined in the configuration file. If you want to apply a specific migration, +you can use the --to option:

  $ platformatic db migrations apply --to 001

Here is an example migration:

  CREATE TABLE graphs (
id SERIAL PRIMARY KEY,
name TEXT
);

You can always rollback to a specific migration with:

  $ platformatic db migrations apply --to VERSION

Use 000 to reset to the initial state.

Options:

  • -c, --config <path> - Path to the configuration file.
  • -t, --to <version> - Migrate to a specific version.

If not specified, the configuration will be loaded from any of the following, in the current directory.

  • platformatic.db.json, or
  • platformatic.db.yml, or
  • platformatic.db.tml

You can find more details about the configuration format here:

migrations create

Create next migration files.

  $ platformatic db migrations create

It will generate do and undo sql files in the migrations folder. The name of the +files will be the next migration number.

  $ platformatic db migrations create --name "create_users_table"

Options:

  • -c, --config <path> - Path to the configuration file.

If not specified, the configuration will be loaded from any of the following, in the current directory.

  • platformatic.db.json, or
  • platformatic.db.yml, or
  • platformatic.db.tml

You can find more details about the configuration format here:

migrations

Available commands:

  • migrations create - generate do and undo migration files.
  • migrations apply - apply migration files.

schema

Update the config schema file:

  • schema config - update the JSON schema config available on platformatic.db.schema.json

Your configuration on platformatic.db.json has a schema defined to improve the developer experience and avoid mistakes when updating the configuration of Platformatic DB. +When you run platformatic db init, a new JSON $schema property is added in platformatic.db.schema.json. This can allow your IDE to add suggestions (f.e. mandatory/missing fields, types, default values) by opening the config in platformatic.db.json. +Running platformatic db schema config you can update your schema so that it matches the latest changes available in your config.

Generate a schema from the database and prints it to standard output:

  • schema graphql - generate the GraphQL schema
  • schema openapi - generate the OpenAPI schema

Options:

  • -c, --config FILE - Specify a configuration file to use.

If not specified, the configuration will be loaded from any of the following, in the current directory.

  • platformatic.db.json, or
  • platformatic.db.yml, or
  • platformatic.db.tml

You can find more details about the configuration format here:

seed

Load a seed into the database. This is a convenience method that loads +a JavaScript file and configure @platformatic/sql-mapper to connect to +the database specified in the configuration file.

Here is an example of a seed file:

  'use strict'

module.exports = async function ({ entities, db, sql }) {
await entities.graph.save({ input: { name: 'Hello' } })
await db.query(sql`
INSERT INTO graphs (name) VALUES ('Hello 2');
`)
}

You can run this using the seed command:

  $ platformatic db seed seed.js

Options:

  • --config - Path to the configuration file.

If not specified, the configuration will be loaded from any of the following, in the current directory.

  • platformatic.db.json, or
  • platformatic.db.yml, or
  • platformatic.db.tml

You can find more details about the configuration format here:

start

Start the Platformatic DB server with the following command:

 $ platformatic db start

You will need a configuration file. Here is an example to get you started, +save the following as platformatic.db.json:

  {
"server": {
"hostname": "127.0.0.1",
"port": 0,
"logger": {
"level": "info"
}
},
"db": {
"connectionString": "sqlite://./db"
},
"migrations": {
"dir": "./migrations"
}
}

Remember to create a migration, run the db help migrate command to know more.

All outstanding migrations will be applied to the database unless the +migrations.autoApply configuration option is set to false.

By sending the SIGUSR2 signal, the server can be reloaded.

Options:

  • -c, --config FILE - Specify a configuration file to use.

If not specified, the configuration will be loaded from any of the following, in the current directory.

  • platformatic.db.json, or
  • platformatic.db.yml, or
  • platformatic.db.tml

You can find more details about the configuration format here:

types

Generate typescript types for your entities from the database.

  $ platformatic db types

As a result of executing this command, the Platformatic DB will generate a types +folder with a typescript file for each database entity. It will also generate a +global.d.ts file that injects the types into the Application instance.

In order to add type support to your plugins, you need to install some additional +dependencies. To do this, copy and run an npm install command with dependencies +that "platformatic db types" will ask you.

Here is an example of a platformatic plugin.js with jsdoc support. +You can use it to add autocomplete to your code.

/// <reference path="./global.d.ts" />
'use strict'

/** @param {import('fastify').FastifyInstance} app */
module.exports = async function (app) {
app.get('/movie', async () => {
const movies = await app.platformatic.entities.movie.find({
where: { title: { eq: 'The Hitchhiker\'s Guide to the Galaxy' } }
})
return movies[0].id
})
}

If not specified, the configuration will be loaded from any of the following, in the current directory.

  • platformatic.db.json, or
  • platformatic.db.yml, or
  • platformatic.db.tml

You can find more details about the configuration format here:

service

platformatic service <command>

compile

Compile typescript plugins.

  $ platformatic service compile

As a result of executing this command, Platformatic Service will compile typescript +plugins in the outDir directory.

If not specified, the configuration will be loaded from any of the following, in the current directory.

  • platformatic.service.json, or
  • platformatic.service.yml, or
  • platformatic.service.tml

You can find more details about the configuration format here:

help

Available commands:

  • help - show this help message.
  • help <command> - shows more information about a command.
  • start - start the server.
  • schema config - generate the schema configuration file.

schema

Update the config schema file:

  • schema config - update the JSON schema config available on platformatic.service.schema.json

Your configuration on platformatic.service.json has a schema defined to improve the developer experience and avoid mistakes when updating the configuration of Platformatic Service. +When you initialize a new Platformatic service (f.e. running npm create platformatic@latest), a new JSON $schema property is added in the platformatic.service.json config. This can allow your IDE to add suggestions (f.e. mandatory/missing fields, types, default values) by opening the config in platformatic.service.json. +Running platformatic service schema config you can update your schema so that it matches well the latest changes available on your config.

start

Start the Platformatic Service with the following command:

 $ platformatic service start

You will need a configuration file. Here is an example to get you started, +save the following as platformatic.service.json:

{
"server": {
"hostname": "127.0.0.1",
"port": 0,
"logger": {
"level": "info"
}
},
"plugin": {
"path": "./plugin.js"
}
}

frontend

platformatic frontend <url> <language>

Create frontend code to consume the REST APIs of a Platformatic application.

From the directory you want the frontend code to be generated (typically <YOUR_FRONTEND_APP_DIRECTORY>/src/) run -

npx platformatic frontend http://127.0.0.1:3042 ts

ℹ️

Where http://127.0.0.1:3042 must be replaced with your Platformatic application endpoint, and the language can either be ts or js. When the command is run, the Platformatic CLI generates -

  • api.d.ts - A TypeScript module that includes all the OpenAPI-related types.
  • api.ts or api.js - A module that includes a function for every single REST endpoint.

If you use the --name option it will create custom file names.

npx platformatic frontend http://127.0.0.1:3042 ts --name foobar

Will create foobar.ts and foobar-types.d.ts

Refer to the dedicated guide where the full process of generating and consuming the frontend code is described.

In case of problems, please check that:

  • The Platformatic app URL is valid.
  • The Platformatic app whose URL belongs must be up and running.
  • OpenAPI must be enabled (db.openapi in your platformatic.db.json is not set to false). You can find more details about the db configuration format here.
  • CORS must be managed in your Platformatic app (server.cors.origin.regexp in your platformatic.db.json is set to /*/, for instance). You can find more details about the cors configuration here.

runtime

platformatic runtime <command>

compile

Compile all typescript plugins for all services.

  $ platformatic runtime compile

This command will compile the TypeScript plugins for each service registered in the runtime.

help

Available commands:

  • help - show this help message.
  • help <command> - shows more information about a command.
  • start - start the application.

start

Start the Platformatic Runtime with the following command:

 $ platformatic runtime start

start

Start a Platformatic application with the following command:

$ platformatic start

Options:

  • -c, --config <path> - Path to the configuration file.
  • --inspect[=[host:]port] - Start the Node.js debugger. host defaults to '127.0.0.1'. port defaults to 9229. Use caution when binding to a public host:port combination.
  • --inspect-brk[=[host:]port] - Start the Node.js debugger and block until a client has attached. host defaults to '127.0.0.1'. port defaults to 9229. Use caution when binding to a public host:port combination.
+ + + + \ No newline at end of file diff --git a/docs/0.41.1/reference/client/frontend/index.html b/docs/0.41.1/reference/client/frontend/index.html new file mode 100644 index 00000000000..243532eaa04 --- /dev/null +++ b/docs/0.41.1/reference/client/frontend/index.html @@ -0,0 +1,17 @@ + + + + + +Frontend client | Platformatic Open Source Software + + + + + +
+
Version: 0.41.1

Frontend client

Create implementation and type files that exposes a client for a remote OpenAPI server, that uses fetch and can run in any browser.

To create a client for a remote OpenAPI API, you can use the following command:

$ platformatic frontend http://example.com/to/schema/file <language> --name <clientname>

where <language> can be either js or ts.

This will create two files clientname.js (or clientname.ts) and clientname-types.d.ts for types.

clientname by default is api

Usage

The implementation generated by the tool exports all the named operations found and a factory object.

Named operations

import { setBaseUrl, getMovies } from './api.js'

setBaseUrl('http://my-server-url.com') // modifies the global `baseUrl` variable

const movies = await getMovies({})
console.log(movies)

Factory

The factory object is called build and can be used like this

import build from './api.js'

const client = build('http://my-server-url.com')

const movies = await client.getMovies({})
console.log(movies)

You can use both named operations and the factory in the same file. They can work on different hosts, so the factory does not use the global setBaseUrl function.

Generated Code

The type file will look like this

export interface GetMoviesRequest {
'limit'?: number;
'offset'?: number;
// ... all other options
}

interface GetMoviesResponseOK {
'id': number;
'title': string;
}
export interface Api {
setBaseUrl(newUrl: string) : void;
getMovies(req: GetMoviesRequest): Promise<Array<GetMoviesResponseOK>>;
// ... all operations listed here
}

type PlatformaticFrontendClient = Omit<Api, 'setBaseUrl'>
export default function build(url: string): PlatformaticFrontendClient

The javascript implementation will look like this

let baseUrl = ''
/** @type {import('./api-types.d.ts').Api['setBaseUrl']} */
export const setBaseUrl = (newUrl) => { baseUrl = newUrl }

/** @type {import('./api-types.d.ts').Api['getMovies']} */
export const getMovies = async (request) => {
return await _getMovies(baseUrl, request)
}
async function _createMovie (url, request) {
const response = await fetch(`${url}/movies/`, {
method:'post',
body: JSON.stringify(request),
headers: {
'Content-Type': 'application/json'
}
})

if (!response.ok) {
throw new Error(await response.text())
}

return await response.json()
}

/** @type {import('./api-types.d.ts').Api['createMovie']} */
export const createMovie = async (request) => {
return await _createMovie(baseUrl, request)
}
// ...

export default function build (url) {
return {
getMovies: _getMovies.bind(url, ...arguments),
// ...
}
}

The typescript implementation will look like this

import type { Api } from './api-types'
import * as Types from './api-types'

let baseUrl = ''
export const setBaseUrl = (newUrl: string) : void => { baseUrl = newUrl }

const _getMovies = async (url: string, request: Types.GetMoviesRequest) => {
const response = await fetch(`${url}/movies/?${new URLSearchParams(Object.entries(request || {})).toString()}`)

if (!response.ok) {
throw new Error(await response.text())
}

return await response.json()
}

export const getMovies: Api['getMovies'] = async (request: Types.GetMoviesRequest) => {
return await _getMovies(baseUrl, request)
}
// ...
export default function build (url) {
return {
getMovies: _getMovies.bind(url, ...arguments),
// ...
}
}
+ + + + \ No newline at end of file diff --git a/docs/0.41.1/reference/client/introduction/index.html b/docs/0.41.1/reference/client/introduction/index.html new file mode 100644 index 00000000000..d1b18f8b193 --- /dev/null +++ b/docs/0.41.1/reference/client/introduction/index.html @@ -0,0 +1,34 @@ + + + + + +Platformatic Client | Platformatic Open Source Software + + + + + +
+
Version: 0.41.1

Platformatic Client

Create a Fastify plugin that exposes a client for a remote OpenAPI or GraphQL API.

To create a client for a remote OpenAPI API, you can use the following command:

$ platformatic client http://example.com/to/schema/file --name myclient

To create a client for a remote Graphql API, you can use the following command:

$ platformatic client http://example.com/graphql --name myclient

Usage with Platformatic Service or Platformatic DB

If you run the generator in a Platformatic application, it will automatically extend it to load your client by editing the configuration file and adding a clients section. Then, in any part of your Platformatic application you can use the client.

You can use the client in your application in Javascript, calling a GraphQL endpoint:

// Use a typescript reference to set up autocompletion
// and explore the generated APIs.

/// <reference path="./myclient" />

/** @type {import('fastify').FastifyPluginAsync<{}>} */
module.exports = async function (app, opts) {
app.post('/', async (request, reply) => {
const res = await app.myclient.graphql({
query: 'query { movies { title } }'
})
return res
})
}

or in Typescript, calling an OpenAPI endpoint:

import { FastifyInstance } from 'fastify'
/// <reference path="./myclient" />

export default async function (app: FastifyInstance) {
app.get('/', async () => {
return app.myclient.get({})
})
}

The client configuration in the platformatic.db.json and platformatic.service.json would look like:

{
"clients": [{
"schema": "./myclient/myclient.openapi.json" // or ./myclient/myclient.schema.graphl
"name": "myclient",
"type": "openapi" // or graphql
"url": "{ PLT_MYCLIENT_URL }"
}]
}

Note that the generator would also have updated the .env and .env.sample files if they exist.

Generating a client for a service running within Platformatic Runtime

Platformatic Runtime allows you to create a network of services that are not exposed. +To create a client to invoke one of those services from another, run:

$ platformatic client --name <clientname> --runtime <serviceId>

Where <clientname> is the name of the client and <serviceId> is the id of the given service +(which correspond in the basic case with the folder name of that service). +The client generated is identical to the one in the previous section.

Note that this command looks for a platformatic.runtime.json in a parent directory.

Example

As an example, consider a network of three microservices:

  • somber-chariot, an instance of Platformatic DB;
  • languid-noblemen, an instance of Platformatic Service;
  • pricey-paesant, an instance of Platformatic Composer, which is also the runtime entrypoint.

From within the languid-noblemen folder, we can run:

$ platformatic client --name chariot --runtime somber-chariot

The client configuration in the platformatic.db.json and platformatic.service.json would look like:

{
"clients": [{
"path": "./chariot",
"serviceId": "somber-chariot"
}]
}

Even if the client is generated from an HTTP endpoint, it is possible to add a serviceId property to each client object shown above. This is not required, but if using the Platformatic Runtime, the serviceId property will be used to identify the service dependency.

Types Generator

The types for the client are automatically generated for both OpenAPI and GraphQL schemas.

You can generate only the types with the --types-only flag.

For example

$ platformatic client http://example.com/to/schema/file --name myclient --types-only

Will create the single myclient.d.ts file in current directory

OpenAPI

We provide a fully typed experience for OpenAPI, Typing both the request and response for +each individual OpenAPI operation.

Consider this example:

// Omitting all the individual Request and Response payloads for brevity

interface Client {
getMovies(req: GetMoviesRequest): Promise<Array<GetMoviesResponse>>;
createMovie(req: CreateMovieRequest): Promise<CreateMovieResponse>;
updateMovies(req: UpdateMoviesRequest): Promise<Array<UpdateMoviesResponse>>;
getMovieById(req: GetMovieByIdRequest): Promise<GetMovieByIdResponse>;
updateMovie(req: UpdateMovieRequest): Promise<UpdateMovieResponse>;
updateMovie(req: UpdateMovieRequest): Promise<UpdateMovieResponse>;
deleteMovies(req: DeleteMoviesRequest): Promise<DeleteMoviesResponse>;
getQuotesForMovie(req: GetQuotesForMovieRequest): Promise<Array<GetQuotesForMovieResponse>>;
getQuotes(req: GetQuotesRequest): Promise<Array<GetQuotesResponse>>;
createQuote(req: CreateQuoteRequest): Promise<CreateQuoteResponse>;
updateQuotes(req: UpdateQuotesRequest): Promise<Array<UpdateQuotesResponse>>;
getQuoteById(req: GetQuoteByIdRequest): Promise<GetQuoteByIdResponse>;
updateQuote(req: UpdateQuoteRequest): Promise<UpdateQuoteResponse>;
updateQuote(req: UpdateQuoteRequest): Promise<UpdateQuoteResponse>;
deleteQuotes(req: DeleteQuotesRequest): Promise<DeleteQuotesResponse>;
getMovieForQuote(req: GetMovieForQuoteRequest): Promise<GetMovieForQuoteResponse>;
}

type ClientPlugin = FastifyPluginAsync<NonNullable<client.ClientOptions>>

declare module 'fastify' {
interface FastifyInstance {
'client': Client;
}

interface FastifyRequest {
'client': Client;
}
}

declare namespace Client {
export interface ClientOptions {
url: string
}
export const client: ClientPlugin;
export { client as default };
}

declare function client(...params: Parameters<ClientPlugin>): ReturnType<ClientPlugin>;
export = client;

GraphQL

We provide a partially typed experience for GraphQL, because we do not want to limit +how you are going to query the remote system. Take a look at this example:

declare module 'fastify' {
interface GraphQLQueryOptions {
query: string;
headers: Record<string, string>;
variables: Record<string, unknown>;
}
interface GraphQLClient {
graphql<T>(GraphQLQuery): PromiseLike<T>;
}
interface FastifyInstance {
'client'
: GraphQLClient;

}

interface FastifyRequest {
'client'<T>(GraphQLQuery): PromiseLike<T>;
}
}

declare namespace client {
export interface ClientOptions {
url: string
}
export interface Movie {
'id'?: string;

'title'?: string;

'realeasedDate'?: string;

'createdAt'?: string;

'preferred'?: string;

'quotes'?: Array<Quote>;

}
export interface Quote {
'id'?: string;

'quote'?: string;

'likes'?: number;

'dislikes'?: number;

'movie'?: Movie;

}
export interface MoviesCount {
'total'?: number;

}
export interface QuotesCount {
'total'?: number;

}
export interface MovieDeleted {
'id'?: string;

}
export interface QuoteDeleted {
'id'?: string;

}
export const client: ClientPlugin;
export { client as default };
}

declare function client(...params: Parameters<ClientPlugin>): ReturnType<ClientPlugin>;
export = client;

Given only you can know what GraphQL query you are producing, you are responsible for typing +it accordingly.

Usage with standalone Fastify

If a platformatic configuration file is not found, a complete Fastify plugin is generated to be +used in your Fastify application like so:

const fastify = require('fastify')()
const client = require('./your-client-name')

fastify.register(client, {
url: 'http://example.com'
})

// GraphQL
fastify.post('/', async (request, reply) => {
const res = await request.movies.graphql({
query: 'mutation { saveMovie(input: { title: "foo" }) { id, title } }'
})
return res
})

// OpenAPI
fastify.post('/', async (request, reply) => {
const res = await request.movies.createMovie({ title: 'foo' })
return res
})

fastify.listen({ port: 3000 })

Note that you would need to install @platformatic/client as a dependency.

How are the method names defined in OpenAPI

The names of the operations are defined in the OpenAPI specification. Specifically, we use the operationId. If that's not part of the spec, the name is generated by combining the parts of the path, like /something/{param1}/ and a method GET, so it generates getSomethingParam1.

Authentication

It's very common that downstream services require some form of authentication. How could we add the necessary headers? You can configure them from your plugin:

/// <reference path="./myclient" />

/** @type {import('fastify').FastifyPluginAsync<{}>} */
module.exports = async function (app, opts) {
app.configureMyclient({
async getHeaders (req, reply) {
return {
'foo': 'bar'
}
}
})

app.post('/', async (request, reply) => {
const res = await app.myclient.graphql({
query: 'query { movies { title } }'
})
return res
})
}

Telemetry propagation

To correctly propagate telemetry information, be sure to get the client from the request object, e.g.:

fastify.post('/', async (request, reply) => {
const res = await request.movies.createMovie({ title: 'foo' })
return res
})
+ + + + \ No newline at end of file diff --git a/docs/0.41.1/reference/client/programmatic/index.html b/docs/0.41.1/reference/client/programmatic/index.html new file mode 100644 index 00000000000..dbe5e98a5d7 --- /dev/null +++ b/docs/0.41.1/reference/client/programmatic/index.html @@ -0,0 +1,17 @@ + + + + + +Programmatic API | Platformatic Open Source Software + + + + + +
+
Version: 0.41.1

Programmatic API

It is possible to use the Platformatic client without the generator.

OpenAPI Client

import { buildOpenAPIClient } from '@platformatic/client'

const client = await buildOpenAPIClient({
url: `https://yourapi.com/documentation/json`,
// path: 'path/to/openapi.json',
headers: {
'foo': 'bar'
}
})

const res = await client.yourOperationName({ foo: 'bar' })

console.log(res)

If you use Typescript you can take advantage of the generated types file

import { buildOpenAPIClient } from '@platformatic/client'
import Client from './client'
//
// interface Client {
// getMovies(req: GetMoviesRequest): Promise<Array<GetMoviesResponse>>;
// createMovie(req: CreateMovieRequest): Promise<CreateMovieResponse>;
// ...
// }
//

const client: Client = await buildOpenAPIClient<Client>({
url: `https://yourapi.com/documentation/json`,
// path: 'path/to/openapi.json',
headers: {
'foo': 'bar'
}
})

const res = await client.getMovies()
console.log(res)

GraphQL Client

import { buildGraphQLClient } from '@platformatic/client'

const client = await buildGraphQLClient({
url: `https://yourapi.com/graphql`,
headers: {
'foo': 'bar'
}
})

const res = await client.graphql({
query: `
mutation createMovie($title: String!) {
saveMovie(input: {title: $title}) {
id
title
}
}
`,
variables: {
title: 'The Matrix'
}
})

console.log(res)
+ + + + \ No newline at end of file diff --git a/docs/0.41.1/reference/composer/api-modification/index.html b/docs/0.41.1/reference/composer/api-modification/index.html new file mode 100644 index 00000000000..2cf22938e36 --- /dev/null +++ b/docs/0.41.1/reference/composer/api-modification/index.html @@ -0,0 +1,19 @@ + + + + + +API modification | Platformatic Open Source Software + + + + + +
+
Version: 0.41.1

API modification

If you want to modify automatically generated API, you can use composer custom onRoute hook.

addComposerOnRouteHook(openApiPath, methods, handler)

  • openApiPath (string) - A route OpenAPI path that Platformatic Composer takes from the OpenAPI specification.
  • methods (string[]) - Route HTTP methods that Platformatic Composer takes from the OpenAPI specification.
  • handler (function) - fastify onRoute hook handler.

onComposerResponse

onComposerResponse hook is called after the response is received from a composed service. +It might be useful if you want to modify the response before it is sent to the client. +If you want to use it you need to add onComposerResponse property to the config object of the route options.

  • request (object) - fastify request object.
  • reply (object) - fastify reply object.
  • body (object) - undici response body object.

Example

app.platformatic.addComposerOnRouteHook('/users/{id}', ['GET'], routeOptions => {
routeOptions.schema.response[200] = {
type: 'object',
properties: {
firstName: { type: 'string' },
lastName: { type: 'string' }
}
}

async function onComposerResponse (request, reply, body) {
const payload = await body.json()
const newPayload = {
firstName: payload.first_name,
lastName: payload.last_name
}
reply.send(newPayload)
}
routeOptions.config.onComposerResponse = onComposerResponse
})
+ + + + \ No newline at end of file diff --git a/docs/0.41.1/reference/composer/configuration/index.html b/docs/0.41.1/reference/composer/configuration/index.html new file mode 100644 index 00000000000..c2cfdc93b69 --- /dev/null +++ b/docs/0.41.1/reference/composer/configuration/index.html @@ -0,0 +1,38 @@ + + + + + +Configuration | Platformatic Open Source Software + + + + + +
+
Version: 0.41.1

Configuration

Platformatic Composer is configured with a configuration file. It supports the use of environment variables as setting values with configuration placeholders.

Configuration file

If the Platformatic CLI finds a file in the current working directory matching +one of these filenames, it will automatically load it:

  • platformatic.composer.json
  • platformatic.composer.json5
  • platformatic.composer.yml or platformatic.composer.yaml
  • platformatic.composer.tml or platformatic.composer.toml

Alternatively, a --config option with a configuration +filepath can be passed to most platformatic composer CLI commands.

The configuration examples in this reference use JSON.

Supported formats

FormatExtensions
JSON.json
JSON5.json5
YAML.yml, .yaml
TOML.tml, .toml

Comments are supported by the JSON5, YAML and TOML file formats.

Settings

Configuration settings are organised into the following groups:

Sensitive configuration settings containing sensitive data should be set using configuration placeholders.

server

A required object with the following settings:

  • hostname (required, string) — Hostname where Platformatic Composer server will listen for connections.

  • port (required, number) — Port where Platformatic Composer server will listen for connections.

  • healthCheck (boolean or object) — Enables the health check endpoint.

    • Powered by @fastify/under-pressure.
    • The value can be an object, used to specify the interval between checks in milliseconds (default: 5000)

    Example

    {
    "server": {
    ...
    "healthCheck": {
    "interval": 2000
    }
    }
    }
  • cors (object) — Configuration for Cross-Origin Resource Sharing (CORS) headers.

    • All options will be passed to the @fastify/cors plugin. In order to specify a RegExp object, you can pass { regexp: 'yourregexp' }, +it will be automatically converted.
  • logger (object) -- the logger configuration.

  • pluginTimeout (integer) -- the number of milliseconds to wait for a Fastify plugin to load, see the fastify docs for more details.

  • https (object) - Configuration for HTTPS supporting the following options.

    • key (required, string, object, or array) - If key is a string, it specifies the private key to be used. If key is an object, it must have a path property specifying the private key file. Multiple keys are supported by passing an array of keys.
    • cert (required, string, object, or array) - If cert is a string, it specifies the certificate to be used. If cert is an object, it must have a path property specifying the certificate file. Multiple certificates are supported by passing an array of keys.

metrics

Configuration for a Prometheus server that will export monitoring metrics +for the current server instance. It uses fastify-metrics +under the hood.

This setting can be a boolean or an object. If set to true the Prometheus server will listen on http://0.0.0.0:9090.

Supported object properties:

  • hostname (string) — The hostname where Prometheus server will listen for connections.
  • port (number) — The port where Prometheus server will listen for connections.
  • auth (object) — Basic Auth configuration. username and password are required here +(use environment variables).

plugins

An optional object that defines the plugins loaded by Platformatic Composer.

  • paths (required, array): an array of paths (string) +or an array of objects composed as follows,

    • path (string): Relative path to plugin's entry point.
    • options (object): Optional plugin options.
    • encapsulate (boolean): if the path is a folder, it instruct Platformatic to not encapsulate those plugins.
    • maxDepth (integer): if the path is a folder, it limits the depth to load the content from.
  • typescript (boolean): enable typescript compilation. A tsconfig.json file is required in the same folder.

    Example

    {
    "plugins": {
    "paths": [{
    "path": "./my-plugin.js",
    "options": {
    "foo": "bar"
    }
    }]
    }
    }

watch

Disable watching for file changes if set to false. It can also be customized with the following options:

  • ignore (string[], default: null): List of glob patterns to ignore when watching for changes. If null or not specified, ignore rule is not applied. Ignore option doesn't work for typescript files.

  • allow (string[], default: ['*.js', '**/*.js']): List of glob patterns to allow when watching for changes. If null or not specified, allow rule is not applied. Allow option doesn't work for typescript files.

    Example

    {
    "watch": {
    "ignore": ["*.mjs", "**/*.mjs"],
    "allow": ["my-plugin.js", "plugins/*.js"]
    }
    }

composer

Configure @platformatic/composer specific settings such as services or refreshTimeout:

  • services (array, default: []) — is an array of objects that defines +the services managed by the composer. Each service object supports the following settings:

    • id (required, string) - A unique identifier for the service.
    • origin (string) - A service origin. Skip this option if the service is executing inside of Platformatic Runtime. In this case, service id will be used instead of origin.
    • openapi (required, object) - The configuration file used to compose OpenAPI specification. See the openapi for details.
    • proxy (object or false) - Service proxy configuration. If false, the service proxy is disabled.
      • prefix (required, string) - Service proxy prefix. All service routes will be prefixed with this value.
    • refreshTimeout (number) - The number of milliseconds to wait for check for changes in the service OpenAPI specification. If not specified, the default value is 1000.

openapi

  • url (string) - A path of the route that exposes the OpenAPI specification. If a service is a Platformatic Service or Platformatic DB, use /documentation/json as a value. Use this or file option to specify the OpenAPI specification.
  • file (string) - A path to the OpenAPI specification file. Use this or url option to specify the OpenAPI specification.
  • prefix (string) - A prefix for the OpenAPI specification. All service routes will be prefixed with this value.
  • config (string) - A path to the OpenAPI configuration file. This file is used to customize the OpenAPI specification. See the openapi-configuration for details.
openapi-configuration

The OpenAPI configuration file is a JSON file that is used to customize the OpenAPI specification. It supports the following options:

  • ignore (boolean) - If true, the route will be ignored by the composer. +If you want to ignore a specific method, use the ignore option in the nested method object.

    Example

    {
    "paths": {
    "/users": {
    "ignore": true
    },
    "/users/{id}": {
    "get": { "ignore": true },
    "put": { "ignore": true }
    }
    }
    }
  • alias (string) - Use it to create an alias for the route path. Original route path will be ignored.

    Example

    {
    "paths": {
    "/users": {
    "alias": "/customers"
    }
    }
    }
  • rename (string) - Use it to rename composed route response fields. +Use json schema format to describe the response structure. For now it works only for 200 response.

    Example

    {
    "paths": {
    "/users": {
    "responses": {
    "200": {
    "type": "array",
    "items": {
    "type": "object",
    "properties": {
    "id": { "rename": "user_id" },
    "name": { "rename": "first_name" }
    }
    }
    }
    }
    }
    }
    }

Examples

Composition of two remote services:

{
"composer": {
"services": [
{
"id": "auth-service",
"origin": "https://auth-service.com",
"openapi": {
"url": "/documentation/json",
"prefix": "auth"
}
},
{
"id": "payment-service",
"origin": "https://payment-service.com",
"openapi": {
"file": "./schemas/payment-service.json"
}
}
],
"refreshTimeout": 1000
}
}

Composition of two local services inside of Platformatic Runtime:

{
"composer": {
"services": [
{
"id": "auth-service",
"openapi": {
"url": "/documentation/json",
"prefix": "auth"
}
},
{
"id": "payment-service",
"openapi": {
"file": "./schemas/payment-service.json"
}
}
],
"refreshTimeout": 1000
}
}

telemetry

Open Telemetry is optionally supported with these settings:

  • serviceName (required, string) — Name of the service as will be reported in open telemetry.
  • version (string) — Optional version (free form)
  • skip (array). Optional list of operations to skip when exporting telemetry defined object with properties:
    • method: GET, POST, PUT, DELETE, PATCH, HEAD, OPTIONS, TRACE
    • path. e.g.: /documentation/json
  • exporter (object or array) — Exporter configuration. If not defined, the exporter defaults to console. If an array of objects is configured, every object must be a valid exporter object. The exporter object has the following properties:
    • type (string) — Exporter type. Supported values are console, otlp, zipkin and memory (default: console). memory is only supported for testing purposes.
    • options (object) — These options are supported:
      • url (string) — The URL to send the telemetry to. Required for otlp exporter. This has no effect on console and memory exporters.
      • headers (object) — Optional headers to send with the telemetry. This has no effect on console and memory exporters.

Note that OTLP traces can be consumed by different solutions, like Jaeger. Here is the full list.

Example

{
"telemetry": {
"serviceName": "test-service",
"exporter": {
"type": "otlp",
"options": {
"url": "http://localhost:4318/v1/traces"
}
}
}
}

Environment variable placeholders

The value for any configuration setting can be replaced with an environment variable +by adding a placeholder in the configuration file, for example {PLT_SERVER_LOGGER_LEVEL}.

All placeholders in a configuration must be available as an environment variable +and must meet the allowed placeholder name rules.

Example

platformatic.service.json
{
"server": {
"port": "{PORT}"
}
}

Platformatic will replace the placeholders in this example with the environment +variables of the same name.

Setting environment variables

If a .env file exists it will automatically be loaded by Platformatic using +dotenv. For example:

.env
PLT_SERVER_LOGGER_LEVEL=info
PORT=8080

The .env file must be located in the same folder as the Platformatic configuration +file or in the current working directory.

Environment variables can also be set directly on the command line, for example:

PLT_SERVER_LOGGER_LEVEL=debug npx platformatic composer

Allowed placeholder names

Only placeholder names prefixed with PLT_, or that are in this allow list, will be +dynamically replaced in the configuration file:

  • PORT

This restriction is to avoid accidentally exposing system environment variables. +An error will be raised by Platformatic if it finds a configuration placeholder +that isn't allowed.

The default allow list can be extended by passing a --allow-env CLI option with a +comma separated list of strings, for example:

npx platformatic composer --allow-env=HOST,SERVER_LOGGER_LEVEL

If --allow-env is passed as an option to the CLI, it will be merged with the +default allow list.

+ + + + \ No newline at end of file diff --git a/docs/0.41.1/reference/composer/introduction/index.html b/docs/0.41.1/reference/composer/introduction/index.html new file mode 100644 index 00000000000..9983ed803b7 --- /dev/null +++ b/docs/0.41.1/reference/composer/introduction/index.html @@ -0,0 +1,22 @@ + + + + + +Platformatic Composer | Platformatic Open Source Software + + + + + +
+
Version: 0.41.1

Platformatic Composer

Platformatic Composer is an HTTP server that automatically aggregates multiple +services APIs into a single API.

info

Platformatic Composer is currently in public beta.

Features

Public beta

Platformatic Composer is in public beta. You can use it in production, but it's quite +likely that you'll encounter significant bugs.

If you run into a bug or have a suggestion for improvement, please +raise an issue on GitHub.

Standalone usage

If you're only interested in the features available in Platformatic Composer, you can replace platformatic with @platformatic/composer in the dependencies of your package.json, so that you'll import fewer deps.

Example configuration file

The following configuration file can be used to start a new Platformatic +Composer project. For more details on the configuration file, see the +configuration documentation.

{
"$schema": "https://platformatic.dev/schemas/v0.26.0/composer",
"server": {
"hostname": "127.0.0.1",
"port": 0,
"logger": {
"level": "info"
}
},
"composer": {
"services": [
{
"id": "auth-service",
"origin": "https://auth-service.com",
"openapi": {
"url": "/documentation/json",
"prefix": "auth"
}
},
{
"id": "payment-service",
"origin": "https://payment-service.com",
"openapi": {
"url": "/documentation/json"
}
}
],
"refreshTimeout": 1000
},
"watch": true
}
+ + + + \ No newline at end of file diff --git a/docs/0.41.1/reference/composer/plugin/index.html b/docs/0.41.1/reference/composer/plugin/index.html new file mode 100644 index 00000000000..0d47051d932 --- /dev/null +++ b/docs/0.41.1/reference/composer/plugin/index.html @@ -0,0 +1,18 @@ + + + + + +Plugin | Platformatic Open Source Software + + + + + +
+
Version: 0.41.1

Plugin

If you want to add features to a service, you will need to register a plugin, which will be in the form of a standard Fastify plugin.

The config file will specify where the plugin file is located as the example below:

{
...
"plugins": {
"paths": ["./plugin/index.js"]
}
}

The path is relative to the config file path.

You should export an async function which receives the following parameters:

  • app (FastifyInstance) that is the main fastify instance
  • opts all the options specified in the config file after path

Hot Reload

Plugin file is being watched by fs.watch function.

You don't need to reload Platformatic Composer server while working on your plugin. Every time you save, the watcher will trigger a reload event and the server will auto-restart and load your updated code.

tip

At this time, on Linux, file watch in subdirectories is not supported due to a Node.js limitation (documented here).

Directories

The path can also be a directory. In that case, the directory will be loaded with @fastify/autoload.

Consider the following directory structure:

├── routes
│ ├── foo
│ │ ├── something.js
│ │ └── bar
│ │ └── baz.js
│ ├── single-plugin
│ │ └── utils.js
│ └── another-plugin.js
└── platformatic.composer.json

By default the folder will be added as a prefix to all the routes defined within them. +See the autoload documentation for all the options to customize this behavior.

Multiple plugins

Multiple plugins can be loaded in parallel by specifying an array:

{
...
"plugins": {
"paths": [{
"path": "./plugin/index.js"
}, {
"path": "./routes/"
}]
}
}
+ + + + \ No newline at end of file diff --git a/docs/0.41.1/reference/composer/programmatic/index.html b/docs/0.41.1/reference/composer/programmatic/index.html new file mode 100644 index 00000000000..44f9c4af00d --- /dev/null +++ b/docs/0.41.1/reference/composer/programmatic/index.html @@ -0,0 +1,18 @@ + + + + + +Programmatic API | Platformatic Open Source Software + + + + + +
+
Version: 0.41.1

Programmatic API

In many cases it's useful to start Platformatic Composer using an API instead of +command line, e.g. in tests we want to start and stop our server.

The buildServer function allows that:

import { buildServer } from '@platformatic/composer'

const app = await buildServer('path/to/platformatic.composer.json')
await app.start()

const res = await fetch(app.url)
console.log(await res.json())

// do something

await app.close()

It is also possible to customize the configuration:

import { buildServer } from '@platformatic/composer'

const app = await buildServer({
server: {
hostname: '127.0.0.1',
port: 0
},
services: [
{
id: 'auth-service',
origin: 'https://auth-service.com',
openapi: {
url: '/documentation/json',
prefix: 'auth'
}
},
{
id: 'payment-service',
origin: 'https://payment-service.com',
openapi: {
file: './schemas/payment-service.json'
}
}
]
})

await app.start()

const res = await fetch(app.url)
console.log(await res.json())

// do something

await app.close()
+ + + + \ No newline at end of file diff --git a/docs/0.41.1/reference/db/authorization/introduction/index.html b/docs/0.41.1/reference/db/authorization/introduction/index.html new file mode 100644 index 00000000000..e15f87aab71 --- /dev/null +++ b/docs/0.41.1/reference/db/authorization/introduction/index.html @@ -0,0 +1,21 @@ + + + + + +Authorization | Platformatic Open Source Software + + + + + +
+
Version: 0.41.1

Authorization

Introduction

Authorization in Platformatic DB is role-based. User authentication and the +assignment of roles must be handled by an external authentication service.

Configuration

Authorization strategies and rules are configured via a Platformatic DB +configuration file. See the Platformatic DB Configuration +documentation for the supported settings.

Bypass authorization in development

To make testing and developing easier, it's possible to bypass authorization checks +if an adminSecret is set. See the HTTP headers (development only) documentation.

+ + + + \ No newline at end of file diff --git a/docs/0.41.1/reference/db/authorization/rules/index.html b/docs/0.41.1/reference/db/authorization/rules/index.html new file mode 100644 index 00000000000..5b06a8c8434 --- /dev/null +++ b/docs/0.41.1/reference/db/authorization/rules/index.html @@ -0,0 +1,28 @@ + + + + + +Rules | Platformatic Open Source Software + + + + + +
+
Version: 0.41.1

Rules

Introduction

Authorization rules can be defined to control what operations users are +able to execute via the REST or GraphQL APIs that are exposed by a Platformatic +DB app.

Every rule must specify:

  • role (required) — A role name. It's a string and must match with the role(s) set by an external authentication service.
  • entity (optional) — The Platformatic DB entity to apply this rule to.
  • entities (optional) — The Platformatic DB entities to apply this rule to.
  • defaults (optional) — Configure entity fields that will be +automatically set from user data.
  • One entry for each supported CRUD operation: find, save, delete

One of entity and entities must be specified.

Operation checks

Every entity operation — such as find, insert, save or delete — can have +authorization checks specified for them. This value can be false (operation disabled) +or true (operation enabled with no checks).

To specify more fine-grained authorization controls, add a checks field, e.g.:

{
"role": "user",
"entity": "page",
"find": {
"checks": {
"userId": "X-PLATFORMATIC-USER-ID"
}
},
...
}

In this example, when a user with a user role executes a findPage, they can +access all the data that has userId equal to the value in user metadata with +key X-PLATFORMATIC-USER-ID.

Note that "userId": "X-PLATFORMATIC-USER-ID" is syntactic sugar for:

      "find": {
"checks": {
"userId": {
"eq": "X-PLATFORMATIC-USER-ID"
}
}
}

It's possible to specify more complex rules using all the supported where clause operators.

Note that userId MUST exist as a field in the database table to use this feature.

GraphQL events and subscriptions

Platformatic DB supports GraphQL subscriptions and therefore db-authorization must protect them. +The check is performed based on the find permissions, the only permissions that are supported are:

  1. find: false, the subscription for that role is disabled
  2. find: { checks: { [prop]: 'X-PLATFORMATIC-PROP' } } validates that the given prop is equal
  3. find: { checks: { [prop]: { eq: 'X-PLATFORMATIC-PROP' } } } validates that the given prop is equal

Conflicting rules across roles for different equality checks will not be supported.

Restrict access to entity fields

If a fields array is present on an operation, Platformatic DB restricts the columns on which the user can execute to that list. +For save operations, the configuration must specify all the not-nullable fields (otherwise, it would fail at runtime). +Platformatic does these checks at startup.

Example:

    "rule": {
"entity": "page",
"role": "user",
"find": {
"checks": {
"userId": "X-PLATFORMATIC-USER-ID"
},
"fields": ["id", "title"]
}
...
}

In this case, only id and title are returned for a user with a user role on the page entity.

Set entity fields from user metadata

Defaults are used during database inserts; they are fields that are automatically populated from user metadata, e.g.:

        "defaults": {
"userId": "X-PLATFORMATIC-USER-ID"
},

When an entity is created, the userId column is used and populated using the value from user metadata.

Programmatic rules

If it's necessary to have more control over the authorizations, it's possible to specify the rules programmatically, e.g.:


app.register(auth, {
jwt: {
secret: 'supersecret'
},
rules: [{
role: 'user',
entity: 'page',
async find ({ user, ctx, where }) {
return {
...where,
userId: {
eq: user['X-PLATFORMATIC-USER-ID']
}
}
},
async delete ({ user, ctx, where }) {
return {
...where,
userId: {
eq: user['X-PLATFORMATIC-USER-ID']
}
}
},
defaults: {
userId: async function ({ user, ctx, input }) {
match(user, {
'X-PLATFORMATIC-USER-ID': generated.shift(),
'X-PLATFORMATIC-ROLE': 'user'
})
return user['X-PLATFORMATIC-USER-ID']
}

},
async save ({ user, ctx, where }) {
return {
...where,
userId: {
eq: user['X-PLATFORMATIC-USER-ID']
}
}
}
}]
})

In this example, the user role can delete all the posts edited before yesterday:

 app.register(auth, {
jwt: {
secret: 'supersecret'
},
roleKey: 'X-PLATFORMATIC-ROLE',
anonymousRole: 'anonymous',
rules: [{
role: 'user',
entity: 'page',
find: true,
save: true,
async delete ({ user, ctx, where }) {
return {
...where,
editedAt: {
lt: yesterday
}
}
},
defaults: {
userId: 'X-PLATFORMATIC-USER-ID'
}
}]
})

Access validation on entity mapper for plugins

To assert that a specific user with its role(s) has the correct access rights to use entities in a Platformatic plugin, the context should be passed to the entity mapper in order to verify its permissions, like this:

//plugin.js

app.post('/', async (req, reply) => {
const ctx = req.createPlatformaticCtx()

await app.platformatic.entities.movie.find({
where: { /*...*/ },
ctx
})
})

Skip authorization rules

In custom plugins, it's possible to skip the authorization rules on entities programmatically by setting the skipAuth flag to true or not passing a ctx, e.g.:

// this works even if the user's role doesn't have the `find` permission.
const result = await app.platformatic.entities.page.find({skipAuth: true, ...})

This has the same effect:

// this works even if the user's role doesn't have the `find` permission
const result = await app.platformatic.entities.page.find() // no `ctx`

This is useful for custom plugins for which the authentication is not necessary, so there is no user role set when invoked.

info

Skip authorization rules is not possible on the automatically generated REST and GraphQL APIs.

Avoid repetition of the same rule multiple times

Very often we end up writing the same rules over and over again. +Instead, it's possible to condense the rule for multiple entities on a single entry:

 app.register(auth, {
jwt: {
secret: 'supersecret'
},
roleKey: 'X-PLATFORMATIC-ROLE',
anonymousRole: 'anonymous',
rules: [{
role: 'anonymous',
entities: ['category', 'page'],
find: true,
delete: false,
save: false
}]
})
+ + + + \ No newline at end of file diff --git a/docs/0.41.1/reference/db/authorization/strategies/index.html b/docs/0.41.1/reference/db/authorization/strategies/index.html new file mode 100644 index 00000000000..ed66cc126b0 --- /dev/null +++ b/docs/0.41.1/reference/db/authorization/strategies/index.html @@ -0,0 +1,40 @@ + + + + + +Strategies | Platformatic Open Source Software + + + + + +
+
Version: 0.41.1

Strategies

Introduction

Platformatic DB supports the following authorization strategies:

JSON Web Token (JWT)

The JSON Web Token (JWT) authorization strategy is built on top +of the @fastify/jwt Fastify plugin.

Platformatic DB JWT integration

To configure it, the quickest way is to pass a shared secret in your +Platformatic DB configuration file, for example:

platformatic.db.json
{
"authorization": {
"jwt": {
"secret": "<shared-secret>"
}
}
}

By default @fastify/jwt looks for a JWT in an HTTP request's Authorization +header. This requires HTTP requests to the Platformatic DB API to include an +Authorization header like this:

Authorization: Bearer <token>

See the @fastify/jwt documentation +for all of the available configuration options.

JSON Web Key Sets (JWKS)

The JWT authorization strategy includes support for JSON Web Key Sets.

To configure it:

platformatic.db.json
{
"authorization": {
"jwt": {
"jwks": {
"allowedDomains": [
"https://ISSUER_DOMAIN"
]
}
}
}
}

When a JSON Web Token is included in a request to Platformatic DB, it retrieves the correct public key from https://ISSUER_DOMAIN/.well-known/jwks.json and uses it to verify the JWT signature. The token carries all the information, such as the kid, which is the key ID used to sign the token itself, so no other configuration is required.

JWKS can be enabled without any options:

platformatic.db.json
{
"authorization": {
"jwt": {
"jwks": true
}
}
}

When configured like this, the JWK URL is calculated from the iss (issuer) field of JWT, so +every JWT token from an issuer that exposes a valid JWKS token will pass the validation. +This configuration should only be used in development, while +in every other case the allowedDomains option should be specified.

Any option supported by the get-jwks +library can be specified in the authorization.jwt.jwks object.

JWT Custom Claim Namespace

JWT claims can be namespaced to avoid name collisions. If so, we will receive tokens with custom claims such as: https://platformatic.dev/X-PLATFORMATIC-ROLE (where https://platformatic.dev/ is the namespace). If we want to map these claims to user metadata, removing our namespace, we can specify the namespace in the JWT options:

platformatic.db.json
{
"authorization": {
"jwt": {
"namespace": "https://platformatic.dev/"
}
}
}

With this configuration, the https://platformatic.dev/X-PLATFORMATIC-ROLE claim +is mapped to X-PLATFORMATIC-ROLE user metadata.

Webhook

Platformatic DB can use a webhook to authenticate requests.

Platformatic DB Webhook integration

In this case, the URL is configured on authorization:

platformatic.db.json
{
"authorization": {
"webhook": {
"url": "<webhook url>"
}
}
}

When a request is received, Platformatic sends a POST to the webhook, replicating +the same body and headers, except for:

  • host
  • connection

In the Webhook case, the HTTP response contains the roles/user information as HTTP headers.

HTTP headers (development only)

danger

Passing an admin API key via HTTP headers is highly insecure and should only be used +during development or within protected networks.

If a request has X-PLATFORMATIC-ADMIN-SECRET HTTP header set with a valid adminSecret +(see configuration reference) the +role is set automatically as platformatic-admin, unless a different role is set for +user impersonation (which is disabled if JWT or Webhook are set, see below).

Platformatic DB HTTP Headers

Also, the following rule is automatically added to every entity, allowing the user +that presented the adminSecret to perform any operation on any entity:

{
"role": "platformatic-admin",
"find": false,
"delete": false,
"save": false
}
+ + + + \ No newline at end of file diff --git a/docs/0.41.1/reference/db/authorization/user-roles-metadata/index.html b/docs/0.41.1/reference/db/authorization/user-roles-metadata/index.html new file mode 100644 index 00000000000..f24eacaa63e --- /dev/null +++ b/docs/0.41.1/reference/db/authorization/user-roles-metadata/index.html @@ -0,0 +1,31 @@ + + + + + +User Roles & Metadata | Platformatic Open Source Software + + + + + +
+
Version: 0.41.1

User Roles & Metadata

Introduction

Roles and user information are passed to Platformatic DB from an external +authentication service as a string (JWT claims or HTTP headers). We refer to +this data as user metadata.

Roles

Users can have a list of roles associated with them. These roles can be specified +in an X-PLATFORMATIC-ROLE property as a list of comma separated role names +(the key name is configurable).

Note that role names are just strings.

Reserved roles

Some special role names are reserved by Platformatic DB:

  • platformatic-admin : this identifies a user who has admin powers
  • anonymous: set automatically when no roles are associated

Anonymous role

If a user has no role, the anonymous role is assigned automatically. It's possible +to specify rules to apply to users with this role:

    {
"role": "anonymous",
"entity": "page",
"find": false,
"delete": false,
"save": false
}

In this case, a user that has no role or explicitly has the anonymous role +cannot perform any operations on the page entity.

Role impersonation

If a request includes a valid X-PLATFORMATIC-ADMIN-SECRET HTTP header it is possible to impersonate user roles. The roles to impersonate can be specified by sending an X-PLATFORMATIC-ROLE HTTP header containing a comma separated list of roles.

note

When JWT or Webhook are set, user role impersonation is not enabled, and the role is always set as platformatic-admin automatically if the X-PLATFORMATIC-ADMIN-SECRET HTTP header is specified.

Role configuration

The roles key in user metadata defaults to X-PLATFORMATIC-ROLE. It's possible to change it using the roleKey field in configuration. The same applies to the anonymous role, whose value can be changed using anonymousRole.

 "authorization": {
"roleKey": "X-MYCUSTOM-ROLE_KEY",
"anonymousRole": "anonym",
"rules": [
...
]
}

User metadata

User roles and other user data, such as userId, are referred to by Platformatic +DB as user metadata.

User metadata is parsed from an HTTP request and stored in a user object on the Fastify request object. This object is populated on-demand, but it's possible to populate it explicitly with await request.setupDBAuthorizationUser().

+ + + + \ No newline at end of file diff --git a/docs/0.41.1/reference/db/configuration/index.html b/docs/0.41.1/reference/db/configuration/index.html new file mode 100644 index 00000000000..262f3508a8d --- /dev/null +++ b/docs/0.41.1/reference/db/configuration/index.html @@ -0,0 +1,59 @@ + + + + + +Configuration | Platformatic Open Source Software + + + + + +
+
Version: 0.41.1

Configuration

Platformatic DB is configured with a configuration file. It supports the use +of environment variables as setting values with configuration placeholders.

Configuration file

If the Platformatic CLI finds a file in the current working directory matching +one of these filenames, it will automatically load it:

  • platformatic.db.json
  • platformatic.db.json5
  • platformatic.db.yml or platformatic.db.yaml
  • platformatic.db.tml or platformatic.db.toml

Alternatively, a --config option with a configuration +filepath can be passed to most platformatic db CLI commands.

The configuration examples in this reference use JSON.

Supported formats

FormatExtensions
JSON.json
JSON5.json5
YAML.yml, .yaml
TOML.tml

Comments are supported by the JSON5, YAML and TOML file formats.

Settings

Configuration settings are organised into the following groups:

Sensitive configuration settings, such as a database connection URL that contains +a password, should be set using configuration placeholders.

db

A required object with the following settings:

  • connectionString (required, string) — Database connection URL.

    • Example: postgres://user:password@my-database:5432/db-name
  • schema (array of string) - Currently supported only for postgres, the schemas used to look for entities. If not provided, the default public schema is used.

    Examples

  "db": {
"connectionString": "(...)",
"schema": [
"schema1", "schema2"
],
...

},

  • Platformatic DB supports MySQL, MariaDB, PostgreSQL and SQLite.

  • graphql (boolean or object, default: true) — Controls the GraphQL API interface, with optional GraphiQL UI.

    Examples

    Enables GraphQL support

    {
    "db": {
    ...
    "graphql": true
    }
    }

    Enables GraphQL support with GraphiQL

    {
    "db": {
    ...
    "graphql": {
    "graphiql": true
    }
    }
    }

    It's possible to selectively ignore entities:

    {
    "db": {
    ...
    "graphql": {
    "ignore": {
    "categories": true
    }
    }
    }
    }

    It's possible to selectively ignore fields:

    {
    "db": {
    ...
    "graphql": {
    "ignore": {
    "categories": {
    "name": true
    }
    }
    }
    }
    }

    It's possible to add a custom GraphQL schema during the startup:

    {
    "db": {
    ...
    "graphql": {
    "schemaPath": "path/to/schema.graphql"
    }
    }
    }
    }
  • openapi (boolean or object, default: true) — Enables OpenAPI REST support.

    • If value is an object, all OpenAPI v3 allowed properties can be passed. Also a prefix property can be passed to set the OpenAPI prefix.
    • Platformatic DB uses @fastify/swagger under the hood to manage this configuration.

    Examples

    Enables OpenAPI

    {
    "db": {
    ...
    "openapi": true
    }
    }

    Enables OpenAPI with prefix

    {
    "db": {
    ...
    "openapi": {
    "prefix": "/api"
    }
    }
    }

    Enables OpenAPI with options

    {
    "db": {
    ...
    "openapi": {
    "info": {
    "title": "Platformatic DB",
    "description": "Exposing a SQL database as REST"
    }
    }
    }
    }

    You can for example add the security section, so that Swagger will allow you to add the authentication header to your requests. +In the following code snippet, we're adding a Bearer token in the form of a JWT:

    {
    "db": {
    ...
    "openapi": {
    ...
    "security": [{ "bearerAuth": [] }],
    "components": {
    "securitySchemes": {
    "bearerAuth": {
    "type": "http",
    "scheme": "bearer",
    "bearerFormat": "JWT"
    }
    }
    }
    }
    }
    }

    It's possible to selectively ignore entities:

    {
    "db": {
    ...
    "openapi": {
    "ignore": {
    "categories": true
    }
    }
    }
    }

    It's possible to selectively ignore fields:

    {
    "db": {
    ...
    "openapi": {
    "ignore": {
    "categories": {
    "name": true
    }
    }
    }
    }
    }
  • ignore (object) — Key/value object that defines which database tables should not be mapped as API entities.

    Examples

    {
    "db": {
    ...
    "ignore": {
    "versions": true // "versions" table will be not mapped with GraphQL/REST APIs
    }
    }
    }
  • events (boolean or object, default: true) — Controls the support for events published by the SQL mapping layer. If enabled, this option adds support for GraphQL Subscriptions over WebSocket. By default it uses an in-process message broker. It's possible to configure it to use Redis instead.

    Examples

    {
    "db": {
    ...
    "events": {
    "connectionString": "redis://:password@redishost.com:6380/"
    }
    }
    }
  • schemalock (boolean or object, default: false) — Controls the caching of the database schema on disk. +If set to true the database schema metadata is stored inside a schema.lock file. +It's also possible to configure the location of that file by specifying a path, like so:

    Examples

    {
    "db": {
    ...
    "schemalock": {
    "path": "./dbmetadata"
    }
    }
    }

    Starting Platformatic DB or running a migration will automatically create the schemalock file.

metrics

Configuration for a Prometheus server that will export monitoring metrics +for the current server instance. It uses fastify-metrics +under the hood.

This setting can be a boolean or an object. If set to true the Prometheus server will listen on http://0.0.0.0:9090.

Supported object properties:

  • hostname (string) — The hostname where Prometheus server will listen for connections.
  • port (number) — The port where Prometheus server will listen for connections.
  • auth (object) — Basic Auth configuration. username and password are required here +(use environment variables).

migrations

Configures Postgrator to run migrations against the database.

An optional object with the following settings:

  • dir (required, string): Relative path to the migrations directory.
  • autoApply (boolean, default: false): Automatically apply migrations when Platformatic DB server starts.

plugins

An optional object that defines the plugins loaded by Platformatic DB.

  • paths (required, array): an array of paths (string) +or an array of objects composed as follows,
    • path (string): Relative path to plugin's entry point.
    • options (object): Optional plugin options.
    • encapsulate (boolean): if the path is a folder, it instructs Platformatic to not encapsulate those plugins, allowing decorators and hooks to be shared across all routes.
    • maxDepth (integer): if the path is a folder, it limits the depth to load the content from.
  • typescript (boolean or object): enable TypeScript compilation. A tsconfig.json file is required in the same folder.
{
"plugins": {
"paths": [{
"path": "./my-plugin.js",
"options": {
"foo": "bar"
}
}]
}
}

typescript compilation options

The typescript option can also be an object to customize the compilation. Here are the supported options:

  • enabled (boolean): enables compilation
  • tsConfig (string): path to the tsconfig.json file relative to the configuration
    • outDir (string): the output directory of tsconfig.json, in case tsconfig.json is not available and enabled is set to false (production build)
  • flags (array of string): flags to be passed to tsc. Overrides tsConfig. +

Example:

{
"plugins": {
"paths": [{
"path": "./my-plugin.js",
"options": {
"foo": "bar"
}
}],
"typescript": {
"enabled": false,
"tsConfig": "./path/to/tsconfig.json",
"outDir": "dist"
}
}
}

watch

Disable watching for file changes if set to false. It can also be customized with the following options:

  • ignore (string[], default: null): List of glob patterns to ignore when watching for changes. If null or not specified, ignore rule is not applied. Ignore option doesn't work for typescript files.

  • allow (string[], default: ['*.js', '**/*.js']): List of glob patterns to allow when watching for changes. If null or not specified, allow rule is not applied. Allow option doesn't work for typescript files.

    Example

    {
    "watch": {
    "ignore": ["*.mjs", "**/*.mjs"],
    "allow": ["my-plugin.js", "plugins/*.js"]
    }
    }

server

A required object with the following settings:

  • hostname (required, string) — Hostname where Platformatic DB server will listen for connections.

  • port (required, number) — Port where Platformatic DB server will listen for connections.

  • healthCheck (boolean or object) — Enables the health check endpoint.

    • Powered by @fastify/under-pressure.
    • The value can be an object, used to specify the interval between checks in milliseconds (default: 5000)

    Example

    {
    "server": {
    ...
    "healthCheck": {
    "interval": 2000
    }
    }
    }
  • cors (object) — Configuration for Cross-Origin Resource Sharing (CORS) headers.

    • All options will be passed to the @fastify/cors plugin. In order to specify a RegExp object, you can pass { regexp: 'yourregexp' }, +it will be automatically converted
  • https (object) - Configuration for HTTPS supporting the following options.

    • key (required, string, object, or array) - If key is a string, it specifies the private key to be used. If key is an object, it must have a path property specifying the private key file. Multiple keys are supported by passing an array of keys.
    • cert (required, string, object, or array) - If cert is a string, it specifies the certificate to be used. If cert is an object, it must have a path property specifying the certificate file. Multiple certificates are supported by passing an array of keys.
  • logger (object) -- the logger configuration.

  • pluginTimeout (integer) -- the number of milliseconds to wait for a Fastify plugin to load

  • bodyLimit (integer) -- the maximum request body size in bytes

  • maxParamLength (integer) -- the maximum length of a request parameter

  • caseSensitive (boolean) -- if true, the router will be case sensitive

  • ignoreTrailingSlash (boolean) -- if true, the router will ignore the trailing slash

  • ignoreDuplicateSlashes (boolean) -- if true, the router will ignore duplicate slashes in a path

  • connectionTimeout (integer) -- the milliseconds to wait for a new HTTP request

  • keepAliveTimeout (integer) -- the milliseconds to wait for a keep-alive HTTP request

  • maxRequestsPerSocket (integer) -- the maximum number of requests per socket

  • forceCloseConnections (boolean or "idle") -- if true, the server will close all connections when it is closed

  • requestTimeout (integer) -- the milliseconds to wait for a request to be completed

  • disableRequestLogging (boolean) -- if true, the request logger will be disabled

  • exposeHeadRoutes (boolean) -- if true, the router will expose HEAD routes

  • serializerOpts (object) -- the serializer options

  • requestIdHeader (string or false) -- the name of the header that will contain the request id

  • requestIdLogLabel (string) -- Defines the label used for the request identifier when logging the request. default: 'reqId'

  • jsonShorthand (boolean) -- default: true -- visit fastify docs for more details

  • trustProxy (boolean or integer or string or String[]) -- default: false -- visit fastify docs for more details

tip

See the fastify docs for more details.

authorization

An optional object with the following settings:

  • adminSecret (string): A secret that should be sent in an x-platformatic-admin-secret HTTP header when performing GraphQL/REST API calls. Use an environment variable placeholder to securely provide the value for this setting.
  • roleKey (string, default: X-PLATFORMATIC-ROLE): The name of the key in user metadata that is used to store the user's roles. See Role configuration.
  • anonymousRole (string, default: anonymous): The name of the anonymous role. See Role configuration.
  • jwt (object): Configuration for the JWT authorization strategy. Any option accepted by @fastify/jwt can be passed in this object.
  • webhook (object): Configuration for the Webhook authorization strategy.
    • url (required, string): Webhook URL that Platformatic DB will make a POST request to.
  • rules (array): Authorization rules that describe the CRUD actions that users are allowed to perform against entities. See Rules documentation.
note

If an authorization object is present, but no rules are specified, no CRUD +operations are allowed unless adminSecret is passed.

Example

platformatic.db.json
{
"authorization": {
"jwt": {
"secret": "{PLT_AUTHORIZATION_JWT_SECRET}"
},
"rules": [
...
]
}
}

telemetry

Open Telemetry is optionally supported with these settings:

  • serviceName (required, string) — Name of the service as will be reported in open telemetry.
  • version (string) — Optional version (free form)
  • skip (array) — Optional list of operations to skip when exporting telemetry. Each item is an object with the following properties:
    • method: GET, POST, PUT, DELETE, PATCH, HEAD, OPTIONS, TRACE
    • path. e.g.: /documentation/json
  • exporter (object or array) — Exporter configuration. If not defined, the exporter defaults to console. If an array of objects is configured, every object must be a valid exporter object. The exporter object has the following properties:
    • type (string) — Exporter type. Supported values are console, otlp, zipkin and memory (default: console). memory is only supported for testing purposes.
    • options (object) — These options are supported:
      • url (string) — The URL to send the telemetry to. Required for otlp exporter. This has no effect on console and memory exporters.
      • headers (object) — Optional headers to send with the telemetry. This has no effect on console and memory exporters.

Note that OTLP traces can be consumed by different solutions, like Jaeger. Here the full list.

Example

{
"telemetry": {
"serviceName": "test-service",
"exporter": {
"type": "otlp",
"options": {
"url": "http://localhost:4318/v1/traces"
}
}
}
}

Environment variable placeholders

The value for any configuration setting can be replaced with an environment variable +by adding a placeholder in the configuration file, for example {PLT_SERVER_LOGGER_LEVEL}.

All placeholders in a configuration must be available as an environment variable +and must meet the allowed placeholder name rules.

Example

platformatic.db.json
{
"db": {
"connectionString": "{DATABASE_URL}"
},
"server": {
"logger": {
"level": "{PLT_SERVER_LOGGER_LEVEL}"
},
"port": "{PORT}"
}
}

Platformatic will replace the placeholders in this example with the environment +variables of the same name.

Setting environment variables

If a .env file exists it will automatically be loaded by Platformatic using +dotenv. For example:

.env
PLT_SERVER_LOGGER_LEVEL=info
PORT=8080

The .env file must be located in the same folder as the Platformatic configuration +file or in the current working directory.

Environment variables can also be set directly on the command line, for example:

PLT_SERVER_LOGGER_LEVEL=debug npx platformatic db

Allowed placeholder names

Only placeholder names prefixed with PLT_, or that are in this allow list, will be +dynamically replaced in the configuration file:

  • PORT
  • DATABASE_URL

This restriction is to avoid accidentally exposing system environment variables. +An error will be raised by Platformatic if it finds a configuration placeholder +that isn't allowed.

The default allow list can be extended by passing a --allow-env CLI option with a +comma separated list of strings, for example:

npx platformatic db start --allow-env=HOST,SERVER_LOGGER_LEVEL
# OR
npx platformatic start --allow-env=HOST,SERVER_LOGGER_LEVEL

If --allow-env is passed as an option to the CLI, it will be merged with the +default allow list.

Sample Configuration

This is a bare minimum configuration for Platformatic DB. Uses a local ./db.sqlite SQLite database, with OpenAPI and GraphQL support.

Server will listen to http://127.0.0.1:3042

{
"server": {
"hostname": "127.0.0.1",
"port": "3042"
},
"db": {
"connectionString": "sqlite://./db.sqlite",
"graphiql": true,
"openapi": true,
"graphql": true
}
}
+ + + + \ No newline at end of file diff --git a/docs/0.41.1/reference/db/introduction/index.html b/docs/0.41.1/reference/db/introduction/index.html new file mode 100644 index 00000000000..6a6f02b0b38 --- /dev/null +++ b/docs/0.41.1/reference/db/introduction/index.html @@ -0,0 +1,25 @@ + + + + + +Platformatic DB | Platformatic Open Source Software + + + + + +
+
Version: 0.41.1

Platformatic DB

Platformatic DB is an HTTP server that provides a flexible set of tools for +building robust APIs with Node.js.

For a high level overview of how Platformatic DB works, please reference the +Architecture guide.

info

Platformatic DB is currently in public beta.

Features

info

Get up and running in 2 minutes using our +Quick Start Guide

Supported databases

DatabaseVersion
SQLite3.
PostgreSQL>= 15
MySQL>= 5.7
MariaDB>= 10.11

The required database driver is automatically inferred and loaded based on the +value of the connectionString +configuration setting.

Public beta

Platformatic DB is in public beta. You can use it in production, but it's quite +likely that you'll encounter significant bugs.

If you run into a bug or have a suggestion for improvement, please +raise an issue on GitHub.

+ + + + \ No newline at end of file diff --git a/docs/0.41.1/reference/db/logging/index.html b/docs/0.41.1/reference/db/logging/index.html new file mode 100644 index 00000000000..4a3b271c328 --- /dev/null +++ b/docs/0.41.1/reference/db/logging/index.html @@ -0,0 +1,25 @@ + + + + + +Logging | Platformatic Open Source Software + + + + + +
+
Version: 0.41.1

Logging

Platformatic DB uses a low overhead logger named Pino +to output structured log messages.

Logger output level

By default the logger output level is set to info, meaning that all log messages +with a level of info or above will be output by the logger. See the +Pino documentation +for details on the supported log levels.

The logger output level can be overridden by adding a logger object to the server configuration settings group:

platformatic.db.json
{
"server": {
"logger": {
"level": "error"
},
...
},
...
}

Log formatting

If you run Platformatic DB in a terminal, where standard out (stdout) +is a TTY:

  • pino-pretty is automatically used +to pretty print the logs and make them easier to read during development.
  • The Platformatic logo is printed (if colors are supported in the terminal emulator)

Example:

$ npx platformatic db start




/////////////
///// /////
/// ///
/// ///
/// ///
&& /// /// &&
&&&&&& /// /// &&&&&&
&&&& /// /// &&&&
&&& /// /// &&&&&&&&&&&&
&&& /// /////// //// && &&&&&
&& /// /////////////// &&&
&&& /// /// &&&
&&& /// // &&
&&& /// &&
&&& /// &&&
&&&& /// &&&
&&&&& /// &&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&
///
///
///
///
///
///


[11:20:33.466] INFO (337606): server listening
url: "http://127.0.0.1:3042"

If stdout is redirected to a non-TTY, the logo is not printed and the logs are +formatted as newline-delimited JSON:

$ npx platformatic db start | head
{"level":30,"time":1665566628973,"pid":338365,"hostname":"darkav2","url":"http://127.0.0.1:3042","msg":"server listening"}
+ + + + \ No newline at end of file diff --git a/docs/0.41.1/reference/db/migrations/index.html b/docs/0.41.1/reference/db/migrations/index.html new file mode 100644 index 00000000000..79ef5809e18 --- /dev/null +++ b/docs/0.41.1/reference/db/migrations/index.html @@ -0,0 +1,17 @@ + + + + + +Migrations | Platformatic Open Source Software + + + + + +
+
Version: 0.41.1

Migrations

Platformatic DB uses Postgrator under the hood to run migrations. Please refer to the Postgrator documentation for guidance on writing migration files.

In brief, you should create a file structure like this

migrations/
|- 001.do.sql
|- 001.undo.sql
|- 002.do.sql
|- 002.undo.sql
|- 003.do.sql
|- 003.undo.sql
|- 004.do.sql
|- 004.undo.sql
|- ... and so on

Postgrator uses a table in your schema, to store which migrations have been already processed, so that only new ones will be applied at every server start.

You can always roll back migrations by specifying which version you would like to roll back to.

Example

$ platformatic db migrations apply --to 002

Will execute 004.undo.sql, 003.undo.sql in this order. If you keep those files in migrations directory, when the server restarts it will execute 003.do.sql and 004.do.sql in this order if the autoApply value is true, or you can run the db migrations apply command.

It's also possible to rollback a single migration with -r:

$ platformatic db migrations apply -r 

How to run migrations

There are two ways to run migrations in Platformatic DB. They can be processed automatically when the server starts if the autoApply value is true, or you can just run the db migrations apply command.

In both cases you have to edit your config file to tell Platformatic DB where your migration files are located.

Automatically on server start

To run migrations when Platformatic DB starts, you need to use the config file root property migrations.

There are two options in the "migrations" property

  • dir (required) the directory where the migration files are located. It will be relative to the config file path.
  • autoApply a boolean value that tells Platformatic DB to auto-apply migrations or not (default: false)

Example

{
...
"migrations": {
"dir": "./path/to/migrations/folder",
"autoApply": false
}
}

Manually with the CLI

See documentation about db migrations apply command

In short:

  • be sure to define a correct migrations.dir folder under the config on platformatic.db.json
  • get the MIGRATION_NUMBER (e.g. if the file is named 002.do.sql, the migration number will be 002)
  • run npx platformatic db migrations apply --to MIGRATION_NUMBER
+ + + + \ No newline at end of file diff --git a/docs/0.41.1/reference/db/plugin/index.html b/docs/0.41.1/reference/db/plugin/index.html new file mode 100644 index 00000000000..eac0409f449 --- /dev/null +++ b/docs/0.41.1/reference/db/plugin/index.html @@ -0,0 +1,19 @@ + + + + + +Plugin | Platformatic Open Source Software + + + + + +
+
Version: 0.41.1

Plugin

If you want to extend Platformatic DB features, it is possible to register a plugin, which will be in the form of a standard Fastify plugin.

The config file will specify where the plugin file is located as the example below:

{
...
"plugins": {
"paths": ["./plugin/index.js"]
}
}

The paths are relative to the config file path.

Once the config file is set up, you can write your plugin to extend Platformatic DB API or write your custom business logic.

You should export an async function which receives the following parameters:

  • app (FastifyInstance) that is the main fastify instance running Platformatic DB
  • opts all the options specified in the config file after path
  • You can always access Platformatic data mapper through app.platformatic property.
info

To make sure that a user has the appropriate set of permissions to perform any action on an entity the context should be passed to the entity mapper operation like this:

app.post('/', async (req, reply) => {
const ctx = req.createPlatformaticCtx()

await app.platformatic.entities.movies.find({
where: { /*...*/ },
ctx
})
})

Check some examples.

Hot Reload

The plugin file is watched by the fs.watch function.

You don't need to reload Platformatic DB server while working on your plugin. Every time you save, the watcher will trigger a reload event and the server will auto-restart and load your updated code.

tip

At this time, on Linux, file watch in subdirectories is not supported due to a Node.js limitation (documented here).

Directories

The path can also be a directory. In that case, the directory will be loaded with @fastify/autoload.

Consider the following directory structure:

├── routes
│ ├── foo
│ │ ├── something.js
│ │ └── bar
│ │ └── baz.js
│ ├── single-plugin
│ │ └── utils.js
│ └── another-plugin.js
└── platformatic.service.json

By default the folder will be added as a prefix to all the routes defined within them. +See the autoload documentation for all the options to customize this behavior.

Multiple plugins

Multiple plugins can be loaded in parallel by specifying an array:

{
...
"plugins": {
"paths": [{
"path": "./plugin/index.js"
}, {
"path": "./routes/"
}]
}
}

fastify.swagger()

TypeScript and autocompletion

If you want to access any of the types provided by Platformatic DB, generate them using the platformatic db types command. +This will create a global.d.ts file that you can now import everywhere, like so:

/// <reference types="./global.d.ts" />

Remember to adjust the path to global.d.ts.

Plugin definition with TypeScript

Here is an example of writing a plugin in TypeScript:

/// <reference types="./global.d.ts" />
import { FastifyInstance, FastifyPluginOptions } from 'fastify'

export default async function (fastify: FastifyInstance, opts: FastifyPluginOptions) {
}

Note that you need to add the "plugins": { "typescript": true } configuration to your platformatic.service.json.

+ + + + \ No newline at end of file diff --git a/docs/0.41.1/reference/db/programmatic/index.html b/docs/0.41.1/reference/db/programmatic/index.html new file mode 100644 index 00000000000..50edb44dc55 --- /dev/null +++ b/docs/0.41.1/reference/db/programmatic/index.html @@ -0,0 +1,17 @@ + + + + + +Programmatic API | Platformatic Open Source Software + + + + + +
+
Version: 0.41.1

Programmatic API

It's possible to start an instance of Platformatic DB from JavaScript.

import { buildServer } from '@platformatic/db'

const app = await buildServer('/path/to/platformatic.db.json')

await app.start() // this will start our server

console.log('URL', app.url)

const res = await fetch(app.url)
console.log(await res.json())

// do something

await app.close()

It is also possible to customize the configuration:

import { buildServer } from '@platformatic/db'

const app = await buildServer({
server: {
hostname: '127.0.0.1',
port: 0
},
db: {
connectionString: 'sqlite://test.sqlite'
},
})

await app.start() // this will start our server

console.log('URL', app.url)

const res = await fetch(app.url)
console.log(await res.json())

// do something

await app.close()

For more details on how this is implemented, read Platformatic Service Programmatic API.

API

buildServer(config)

Returns an instance of the restartable application

RestartableApp

.start()

Listen to the hostname/port combination specified in the config.

.restart()

Restart the Fastify application

.close()

Stops the application.

+ + + + \ No newline at end of file diff --git a/docs/0.41.1/reference/db/schema-support/index.html b/docs/0.41.1/reference/db/schema-support/index.html new file mode 100644 index 00000000000..46effc337f6 --- /dev/null +++ b/docs/0.41.1/reference/db/schema-support/index.html @@ -0,0 +1,21 @@ + + + + + +Schema support | Platformatic Open Source Software + + + + + +
+
Version: 0.41.1

Schema support

It's possible to specify the schemas where the tables are located (if the database supports schemas). Platformatic DB will inspect these schemas to create the entities.

Example

CREATE SCHEMA IF NOT EXISTS "test1";
CREATE TABLE IF NOT EXISTS test1.movies (
id INTEGER PRIMARY KEY,
title TEXT NOT NULL
);

CREATE SCHEMA IF NOT EXISTS "test2";
CREATE TABLE IF NOT EXISTS test2.users (
id INTEGER PRIMARY KEY,
title TEXT NOT NULL
);

The schemas must be specified in configuration in the schema section. +Note that if we use schemas and migrations, we must specify the schema in the migrations table as well +(with postgresql, we assume we use the default public schema).

  ...
"db": {
"connectionString": "(...)",
"schema": [
"test1", "test2"
],
"ignore": {
"versions": true
}
},
"migrations": {
"dir": "migrations",
"table": "test1.versions"
},

...

The entity names are then generated in the form schemaName + entityName, in PascalCase (this is necessary to avoid name collisions in case there are tables with the same name in different schemas). So, for instance, for the example above we generate the Test1Movie and Test2User entities.

info

Please pay attention to the entity names when using schemas, as these are also used to set up authorization rules.

+ + + + \ No newline at end of file diff --git a/docs/0.41.1/reference/runtime/configuration/index.html b/docs/0.41.1/reference/runtime/configuration/index.html new file mode 100644 index 00000000000..57ea1cac4d6 --- /dev/null +++ b/docs/0.41.1/reference/runtime/configuration/index.html @@ -0,0 +1,67 @@ + + + + + +Configuration | Platformatic Open Source Software + + + + + +
+
Version: 0.41.1

Configuration

Platformatic Runtime is configured with a configuration file. It supports the +use of environment variables as setting values with configuration placeholders.

Configuration file

If the Platformatic CLI finds a file in the current working directory matching +one of these filenames, it will automatically load it:

  • platformatic.runtime.json
  • platformatic.runtime.json5
  • platformatic.runtime.yml or platformatic.runtime.yaml
  • platformatic.runtime.tml or platformatic.runtime.toml

Alternatively, a --config option with a configuration +filepath can be passed to most platformatic runtime CLI commands.

The configuration examples in this reference use JSON.

Supported formats

FormatExtensions
JSON.json
JSON5.json5
YAML.yml, .yaml
TOML.tml

Comments are supported by the JSON5, YAML and TOML file formats.

Settings

Configuration settings are organized into the following groups:

Configuration settings containing sensitive data should be set using +configuration placeholders.

The autoload and services settings can be used together, but at least one +of them must be provided. When the configuration file is parsed, autoload +configuration is translated into services configuration.

autoload

The autoload configuration is intended to be used with monorepo applications. +autoload is an object with the following settings:

  • path (required, string) - The path to a directory containing the +microservices to load. In a traditional monorepo application, this directory is +typically named packages.
  • exclude (array of strings) - Child directories inside of path that +should not be processed.
  • mappings (object) - Each microservice is given an ID and is expected +to have a Platformatic configuration file. By default the ID is the +microservice's directory name, and the configuration file is expected to be a +well-known Platformatic configuration file. mappings can be used to override +these default values.
    • id (required, string) - The overridden ID. This becomes the new +microservice ID.
    • config (required, string) - The overridden configuration file name. This is the file that will be used when starting the microservice.

services

services is an array of objects that defines the microservices managed by the +runtime. Each service object supports the following settings:

  • id (required, string) - A unique identifier for the microservice. +When working with the Platformatic Composer, this value corresponds to the id +property of each object in the services section of the config file. When +working with client objects, this corresponds to the optional serviceId +property or the name field in the client's package.json file if a +serviceId is not explicitly provided.
  • path (required, string) - The path to the directory containing +the microservice.
  • config (required, string) - The configuration file used to start +the microservice.

entrypoint

The Platformatic Runtime's entrypoint is a microservice that is exposed +publicly. This value must be the ID of a service defined via the autoload or +services configuration.

hotReload

An optional boolean, defaulting to false, indicating if hot reloading should +be enabled for the runtime. If this value is set to false, it will disable +hot reloading for any microservices managed by the runtime. If this value is +true, hot reloading for individual microservices is managed by the +configuration of that microservice.

danger

While hot reloading is useful for development, it is not recommended for use in +production.

allowCycles

An optional boolean, defaulting to false, indicating if dependency cycles +are allowed between microservices managed by the runtime. When the Platformatic +Runtime parses the provided configuration, it examines the clients of each +microservice, as well as the services of Platformatic Composer applications to +build a dependency graph. A topological sort is performed on this dependency +graph so that each service is started after all of its dependencies have been +started. If there are cycles, the topological sort fails and the Runtime does +not start any applications.

If allowCycles is true, the topological sort is skipped, and the +microservices are started in the order specified in the configuration file.

telemetry

Open Telemetry is optionally supported with these settings:

  • serviceName (required, string) — Name of the service as will be reported in open telemetry. In the runtime case, the name of the services as reported in traces is ${serviceName}-${serviceId}, where serviceId is the id of the service in the runtime.
  • version (string) — Optional version (free form)
  • skip (array) — Optional list of operations to skip when exporting telemetry. Each item is an object with the following properties:
    • method: GET, POST, PUT, DELETE, PATCH, HEAD, OPTIONS, TRACE
    • path. e.g.: /documentation/json
  • exporter (object or array) — Exporter configuration. If not defined, the exporter defaults to console. If an array of objects is configured, every object must be a valid exporter object. The exporter object has the following properties:
    • type (string) — Exporter type. Supported values are console, otlp, zipkin and memory (default: console). memory is only supported for testing purposes.
    • options (object) — These options are supported:
      • url (string) — The URL to send the telemetry to. Required for otlp exporter. This has no effect on console and memory exporters.
      • headers (object) — Optional headers to send with the telemetry. This has no effect on console and memory exporters.

Note that OTLP traces can be consumed by different solutions, like Jaeger. Here the full list.

Example

{
"telemetry": {
"serviceName": "test-service",
"exporter": {
"type": "otlp",
"options": {
"url": "http://localhost:4318/v1/traces"
}
}
}
}

Environment variable placeholders

The value for any configuration setting can be replaced with an environment +variable by adding a placeholder in the configuration file, for example +{PLT_ENTRYPOINT}.

All placeholders in a configuration must be available as an environment +variable and must meet the +allowed placeholder name rules.

Setting environment variables

If a .env file exists it will automatically be loaded by Platformatic using +dotenv. For example:

.env
PLT_ENTRYPOINT=service

The .env file must be located in the same folder as the Platformatic +configuration file or in the current working directory.

Environment variables can also be set directly on the command line, for example:

PLT_ENTRYPOINT=service npx platformatic runtime

Allowed placeholder names

Only placeholder names prefixed with PLT_, or that are in this allow list, +will be dynamically replaced in the configuration file:

  • PORT
  • DATABASE_URL

This restriction is to avoid accidentally exposing system environment variables. +An error will be raised by Platformatic if it finds a configuration placeholder +that isn't allowed.

The default allow list can be extended by passing a --allow-env CLI option +with a comma separated list of strings, for example:

npx platformatic runtime --allow-env=HOST,SERVER_LOGGER_LEVEL

If --allow-env is passed as an option to the CLI, it will be merged with the +default allow list.

+ + + + \ No newline at end of file diff --git a/docs/0.41.1/reference/runtime/introduction/index.html b/docs/0.41.1/reference/runtime/introduction/index.html new file mode 100644 index 00000000000..693d1eadef7 --- /dev/null +++ b/docs/0.41.1/reference/runtime/introduction/index.html @@ -0,0 +1,33 @@ + + + + + +Platformatic Runtime | Platformatic Open Source Software + + + + + +
+
Version: 0.41.1

Platformatic Runtime

Platformatic Runtime is an environment for running multiple Platformatic +microservices as a single monolithic deployment unit.

info

Platformatic Runtime is currently in public beta.

Features

Public beta

Platformatic Runtime is in public beta. You can use it in production, but it's quite +likely that you'll encounter significant bugs.

If you run into a bug or have a suggestion for improvement, please +raise an issue on GitHub.

Standalone usage

If you're only interested in the features available in Platformatic Runtime, you can replace platformatic with @platformatic/runtime in the dependencies of your package.json, so that you'll import fewer deps.

Example configuration file

The following configuration file can be used to start a new Platformatic +Runtime project. For more details on the configuration file, see the +configuration documentation.

{
"$schema": "https://platformatic.dev/schemas/v0.26.0/runtime",
"autoload": {
"path": "./packages",
"exclude": ["docs"]
},
"entrypoint": "entrypointApp"
}

TypeScript Compilation

Platformatic Runtime streamlines the compilation of all services built on TypeScript with the command +plt runtime compile. The TypeScript compiler (tsc) is required to be installed separately.

Interservice communication

The Platformatic Runtime allows multiple microservice applications to run +within a single process. Only the entrypoint binds to an operating system +port and can be reached from outside of the runtime.

Within the runtime, all interservice communication happens by injecting HTTP +requests into the running servers, without binding them to ports. This injection +is handled by +fastify-undici-dispatcher.

Each microservice is assigned an internal domain name based on its unique ID. +For example, a microservice with the ID awesome is given the internal domain +of http://awesome.plt.local. The fastify-undici-dispatcher module maps that +domain to the Fastify server running the awesome microservice. Any Node.js +APIs based on Undici, such as fetch(), will then automatically route requests +addressed to awesome.plt.local to the corresponding Fastify server.

+ + + + \ No newline at end of file diff --git a/docs/0.41.1/reference/runtime/programmatic/index.html b/docs/0.41.1/reference/runtime/programmatic/index.html new file mode 100644 index 00000000000..6297990f6d8 --- /dev/null +++ b/docs/0.41.1/reference/runtime/programmatic/index.html @@ -0,0 +1,28 @@ + + + + + +Programmatic API | Platformatic Open Source Software + + + + + +
+
Version: 0.41.1

Programmatic API

In many cases it's useful to start Platformatic applications using an API +instead of the command line. The @platformatic/runtime API makes it simple to +work with different application types (e.g. service, db, composer and runtime) without +needing to know the application type a priori.

buildServer()

The buildServer function creates a server from a provided configuration +object or configuration filename. +The config can be of either Platformatic Service, Platformatic DB, +Platformatic Composer or any other application built on top of +Platformatic Service.

import { buildServer } from '@platformatic/runtime'

const app = await buildServer('path/to/platformatic.runtime.json')
const entrypointUrl = await app.start()

// Make a request to the entrypoint.
const res = await fetch(entrypointUrl)
console.log(await res.json())

// Do other interesting things.

await app.close()

It is also possible to customize the configuration:

import { buildServer } from '@platformatic/runtime'

const config = {
// $schema: 'https://platformatic.dev/schemas/v0.39.0/runtime',
// $schema: 'https://platformatic.dev/schemas/v0.39.0/service',
// $schema: 'https://platformatic.dev/schemas/v0.39.0/db',
// $schema: 'https://platformatic.dev/schemas/v0.39.0/composer'
...
}
const app = await buildServer(config)

await app.start()

loadConfig()

The loadConfig function is used to read and parse a configuration file for +an arbitrary Platformatic application.

import { loadConfig } from '@platformatic/runtime'

// Read the config based on command line arguments. loadConfig() will detect
// the application type.
const config = await loadConfig({}, ['-c', '/path/to/platformatic.config.json'])

// Read the config based on command line arguments. The application type can
// be provided explicitly.
const config = await loadConfig(
{},
['-c', '/path/to/platformatic.config.json']
)

// Default config can be specified.
const config = await loadConfig(
{},
['-c', '/path/to/platformatic.config.json'],
{ key: 'value' }
)

start()

The start function loads a configuration, builds a server, and starts the +server. However, the server is not returned.

import { start } from '@platformatic/runtime'

await start(['-c', '/path/to/platformatic.config.json'])

startCommand()

The startCommand function is similar to start. However, if an exception +occurs, startCommand logs the error and exits the process. This is different +from start, which throws the exception.

import { startCommand } from '@platformatic/runtime'

await startCommand(['-c', '/path/to/platformatic.config.json'])
+ + + + \ No newline at end of file diff --git a/docs/0.41.1/reference/service/configuration/index.html b/docs/0.41.1/reference/service/configuration/index.html new file mode 100644 index 00000000000..f25a2639279 --- /dev/null +++ b/docs/0.41.1/reference/service/configuration/index.html @@ -0,0 +1,38 @@ + + + + + +Configuration | Platformatic Open Source Software + + + + + +
+
Version: 0.41.1

Configuration

Platformatic Service is configured with a configuration file. It supports the use +of environment variables as setting values with configuration placeholders.

Configuration file

If the Platformatic CLI finds a file in the current working directory matching +one of these filenames, it will automatically load it:

  • platformatic.service.json
  • platformatic.service.json5
  • platformatic.service.yml or platformatic.service.yaml
  • platformatic.service.tml or platformatic.service.toml

Alternatively, a --config option with a configuration +filepath can be passed to most platformatic service CLI commands.

The configuration examples in this reference use JSON.

Supported formats

FormatExtensions
JSON.json
JSON5.json5
YAML.yml, .yaml
TOML.tml, .toml

Comments are supported by the JSON5, YAML and TOML file formats.

Settings

Configuration settings are organised into the following groups:

Sensitive configuration settings, such as a database connection URL that contains +a password, should be set using configuration placeholders.

server

A required object with the following settings:

  • hostname (required, string) — Hostname where Platformatic Service server will listen for connections.

  • port (required, number) — Port where Platformatic Service server will listen for connections.

  • healthCheck (boolean or object) — Enables the health check endpoint.

    • Powered by @fastify/under-pressure.
    • The value can be an object, used to specify the interval between checks in milliseconds (default: 5000)

    Example

    {
    "server": {
    ...
    "healthCheck": {
    "interval": 2000
    }
    }
    }
  • cors (object) — Configuration for Cross-Origin Resource Sharing (CORS) headers.

    • All options will be passed to the @fastify/cors plugin. In order to specify a RegExp object, you can pass { regexp: 'yourregexp' }, +it will be automatically converted.
  • logger (object) -- the logger configuration.

  • pluginTimeout (integer) -- the number of milliseconds to wait for a Fastify plugin to load, see the fastify docs for more details.

  • https (object) - Configuration for HTTPS supporting the following options.

    • key (required, string, object, or array) - If key is a string, it specifies the private key to be used. If key is an object, it must have a path property specifying the private key file. Multiple keys are supported by passing an array of keys.
    • cert (required, string, object, or array) - If cert is a string, it specifies the certificate to be used. If cert is an object, it must have a path property specifying the certificate file. Multiple certificates are supported by passing an array of keys.

metrics

Configuration for a Prometheus server that will export monitoring metrics +for the current server instance. It uses fastify-metrics +under the hood.

This setting can be a boolean or an object. If set to true the Prometheus server will listen on http://0.0.0.0:9090.

Supported object properties:

  • hostname (string) — The hostname where Prometheus server will listen for connections.
  • port (number) — The port where Prometheus server will listen for connections.
  • auth (object) — Basic Auth configuration. username and password are required here +(use environment variables).

plugins

An optional object that defines the plugins loaded by Platformatic Service.

  • paths (required, array): an array of paths (string) +or an array of objects composed as follows,
    • path (string): Relative path to plugin's entry point.
    • options (object): Optional plugin options.
    • encapsulate (boolean): if the path is a folder, it instructs Platformatic not to encapsulate those plugins.
    • maxDepth (integer): if the path is a folder, it limits the depth to load the content from.
  • typescript (boolean or object): enable TypeScript compilation. A tsconfig.json file is required in the same folder.

Example

{
"plugins": {
"paths": [{
"path": "./my-plugin.js",
"options": {
"foo": "bar"
}
}]
}
}

typescript compilation options

The typescript can also be an object to customize the compilation. Here are the supported options:

  • enabled (boolean): enables compilation
  • tsConfig (string): path to the tsconfig.json file relative to the configuration
  • outDir (string): the output directory of tsconfig.json, in case tsconfig.json is not available +and enabled is set to false (production build)
  • flags (array of string): flags to be passed to tsc. Overrides tsConfig. +

Example:

{
"plugins": {
"paths": [{
"path": "./my-plugin.js",
"options": {
"foo": "bar"
}
}],
"typescript": {
"enabled": false,
"tsConfig": "./path/to/tsconfig.json",
"outDir": "dist"
}
}
}

watch

Disable watching for file changes if set to false. It can also be customized with the following options:

  • ignore (string[], default: null): List of glob patterns to ignore when watching for changes. If null or not specified, ignore rule is not applied. Ignore option doesn't work for typescript files.

  • allow (string[], default: ['*.js', '**/*.js']): List of glob patterns to allow when watching for changes. If null or not specified, allow rule is not applied. Allow option doesn't work for typescript files.

    Example

    {
    "watch": {
    "ignore": ["*.mjs", "**/*.mjs"],
    "allow": ["my-plugin.js", "plugins/*.js"]
    }
    }

service

Configure @platformatic/service specific settings such as graphql or openapi:

  • graphql (boolean or object, default: false) — Controls the GraphQL API interface, with optional GraphiQL UI.

    Examples

    Enables GraphQL support

    {
    "service": {
    "graphql": true
    }
    }

    Enables GraphQL support with GraphiQL

    {
    "service": {
    "graphql": {
    "graphiql": true
    }
    }
    }
  • openapi (boolean or object, default: false) — Enables OpenAPI REST support.

    • If value is an object, all OpenAPI v3 allowed properties can be passed. Also a prefix property can be passed to set the OpenAPI prefix.
    • Platformatic Service uses @fastify/swagger under the hood to manage this configuration.

    Examples

    Enables OpenAPI

    {
    "service": {
    ...
    "openapi": true
    }
    }

    Enables OpenAPI with prefix

    {
    "service": {
    "openapi": {
    "prefix": "/api"
    }
    }
    }

    Enables OpenAPI with options

    {
    "service": {
    "openapi": {
    "info": {
    "title": "Platformatic Service",
    "description": "Exposing a SQL database as REST"
    }
    }
    }
    }

telemetry

Open Telemetry is optionally supported with these settings:

  • serviceName (required, string) — Name of the service as will be reported in open telemetry.
  • version (string) — Optional version (free form)
  • skip (array). Optional list of operations to skip when exporting telemetry defined object with properties:
    • method: GET, POST, PUT, DELETE, PATCH, HEAD, OPTIONS, TRACE
    • path. e.g.: /documentation/json
  • exporter (object or array) — Exporter configuration. If not defined, the exporter defaults to console. If an array of objects is configured, every object must be a valid exporter object. The exporter object has the following properties:
    • type (string) — Exporter type. Supported values are console, otlp, zipkin and memory (default: console). memory is only supported for testing purposes.
    • options (object) — These options are supported:
      • url (string) — The URL to send the telemetry to. Required for otlp exporter. This has no effect on console and memory exporters.
      • headers (object) — Optional headers to send with the telemetry. This has no effect on console and memory exporters.

Note that OTLP traces can be consumed by different solutions, like Jaeger. Here the full list.

Example

{
"telemetry": {
"serviceName": "test-service",
"exporter": {
"type": "otlp",
"options": {
"url": "http://localhost:4318/v1/traces"
}
}
}
}

Environment variable placeholders

The value for any configuration setting can be replaced with an environment variable +by adding a placeholder in the configuration file, for example {PLT_SERVER_LOGGER_LEVEL}.

All placeholders in a configuration must be available as an environment variable +and must meet the allowed placeholder name rules.

Example

platformatic.service.json
{
"server": {
"port": "{PORT}"
}
}

Platformatic will replace the placeholders in this example with the environment +variables of the same name.

Setting environment variables

If a .env file exists it will automatically be loaded by Platformatic using +dotenv. For example:

.env
PLT_SERVER_LOGGER_LEVEL=info
PORT=8080

The .env file must be located in the same folder as the Platformatic configuration +file or in the current working directory.

Environment variables can also be set directly on the command line, for example:

PLT_SERVER_LOGGER_LEVEL=debug npx platformatic service

Allowed placeholder names

Only placeholder names prefixed with PLT_, or that are in this allow list, will be +dynamically replaced in the configuration file:

  • PORT
  • DATABASE_URL

This restriction is to avoid accidentally exposing system environment variables. +An error will be raised by Platformatic if it finds a configuration placeholder +that isn't allowed.

The default allow list can be extended by passing a --allow-env CLI option with a +comma separated list of strings, for example:

npx platformatic service --allow-env=HOST,SERVER_LOGGER_LEVEL

If --allow-env is passed as an option to the CLI, it will be merged with the +default allow list.

+ + + + \ No newline at end of file diff --git a/docs/0.41.1/reference/service/introduction/index.html b/docs/0.41.1/reference/service/introduction/index.html new file mode 100644 index 00000000000..ba4a9783d8e --- /dev/null +++ b/docs/0.41.1/reference/service/introduction/index.html @@ -0,0 +1,21 @@ + + + + + +Platformatic Service | Platformatic Open Source Software + + + + + +
+
Version: 0.41.1

Platformatic Service

Platformatic Service is an HTTP server that provides developer tools for +building robust APIs with Node.js.

For a high level overview of how Platformatic DB works, please reference the +Architecture guide.

info

Platformatic Service is currently in public beta.

Features

Public beta

Platformatic Service is in public beta. You can use it in production, but it's quite +likely that you'll encounter significant bugs.

If you run into a bug or have a suggestion for improvement, please +raise an issue on GitHub.

Standalone usage

If you're only interested in the features available in Platformatic Service, you can simply replace platformatic with @platformatic/service in the dependencies of your package.json, so that you'll import fewer dependencies.

You can use the plt-service command, it's the equivalent of plt service.

+ + + + \ No newline at end of file diff --git a/docs/0.41.1/reference/service/plugin/index.html b/docs/0.41.1/reference/service/plugin/index.html new file mode 100644 index 00000000000..d45caa950a6 --- /dev/null +++ b/docs/0.41.1/reference/service/plugin/index.html @@ -0,0 +1,21 @@ + + + + + +Plugin | Platformatic Open Source Software + + + + + +
+
Version: 0.41.1

Plugin

If you want to add features to a service, you will need to register a plugin, which will be in the form of a standard Fastify plugin.

The config file will specify where the plugin file is located as the example below:

{
...
"plugins": {
"paths": ["./plugin/index.js"]
}
}

The path is relative to the config file path.

You should export an async function which receives the following parameters

  • app (FastifyInstance) that is the main fastify instance
  • opts all the options specified in the config file after path

Hot Reload

Plugin file is being watched by fs.watch function.

You don't need to reload Platformatic Service server while working on your plugin. Every time you save, the watcher will trigger a reload event and the server will auto-restart and load your updated code.

tip

At this time, on Linux, file watch in subdirectories is not supported due to a Node.js limitation (documented here).

Directories

The path can also be a directory. In that case, the directory will be loaded with @fastify/autoload.

Consider the following directory structure:

├── routes
│ ├── foo
│ │ ├── something.js
│ │ └── bar
│ │ └── baz.js
│ ├── single-plugin
│ │ └── utils.js
│ └── another-plugin.js
└── platformatic.service.json

By default the folder will be added as a prefix to all the routes defined within them. +See the autoload documentation for all the options to customize this behavior.

Multiple plugins

Multiple plugins can be loaded in parallel by specifying an array:

{
...
"plugins": {
"paths": [{
"path": "./plugin/index.js"
}, {
"path": "./routes/"
}]
}
}

TypeScript and Autocompletion

In order to provide the correct typings of the features added by Platformatic Service to your Fastify instance, +add the following at the top of your files:

/// <reference types="@platformatic/service" />

Plugin definition with TypeScript

Here is an example of writing a plugin in TypeScript:

/// <reference types="@platformatic/service" />
import { FastifyInstance, FastifyPluginOptions } from 'fastify'

export default async function (fastify: FastifyInstance, opts: FastifyPluginOptions) {
}

Note that you need to add the "typescript": true configuration to your platformatic.service.json.

Loading compiled files

Setting "typescript": false but including a tsconfig.json with an outDir +option, will instruct Platformatic Service to try loading your plugins from that folder instead. +This setup is needed to support pre-compiled sources to reduce cold start time during deployment.

+ + + + \ No newline at end of file diff --git a/docs/0.41.1/reference/service/programmatic/index.html b/docs/0.41.1/reference/service/programmatic/index.html new file mode 100644 index 00000000000..46dddb59e81 --- /dev/null +++ b/docs/0.41.1/reference/service/programmatic/index.html @@ -0,0 +1,19 @@ + + + + + +Programmatic API | Platformatic Open Source Software + + + + + +
+
Version: 0.41.1

Programmatic API

In many cases it's useful to start Platformatic Service using an API instead of +command line, e.g. in tests we want to start and stop our server.

The buildServer function allows that:

import { buildServer } from '@platformatic/service'

const app = await buildServer('path/to/platformatic.service.json')

await app.start()

const res = await fetch(app.url)
console.log(await res.json())

// do something

await app.close()

It is also possible to customize the configuration:

import { buildServer } from '@platformatic/service'

const app = await buildServer({
server: {
hostname: '127.0.0.1',
port: 0
}
})

await app.start()

const res = await fetch(app.url)
console.log(await res.json())

// do something

await app.close()

Creating a reusable application on top of Platformatic Service

Platformatic DB is built on top of Platformatic Service. +If you want to build a similar kind of tool, follow this example:

import { buildServer, schema } from '@platformatic/service'

async function myPlugin (app, opts) {
// app.platformatic.configManager contains an instance of the ConfigManager
console.log(app.platformatic.configManager.current)

await platformaticService(app, opts)
}

// break Fastify encapsulation
myPlugin[Symbol.for('skip-override')] = true
myPlugin.configType = 'myPlugin'

// This is the schema for this reusable application configuration file,
// customize at will but retain the base properties of the schema from
// @platformatic/service
myPlugin.schema = schema

// The configuration of the ConfigManager
myPlugin.configManagerConfig = {
schema: foo.schema,
envWhitelist: ['PORT', 'HOSTNAME'],
allowToWatch: ['.env'],
schemaOptions: {
useDefaults: true,
coerceTypes: true,
allErrors: true,
strict: false
},
async transformConfig () {
console.log(this.current) // this is the current config

// In this method you can alter the configuration before the application
// is started. It's useful to apply some defaults that cannot be derived
// inside the schema, such as resolving paths.
}
}


const server = await buildServer('path/to/config.json', myPlugin)

await server.start()

const res = await fetch(server.listeningOrigin)
console.log(await res.json())

// do something

await server.close()
+ + + + \ No newline at end of file diff --git a/docs/0.41.1/reference/sql-events/fastify-plugin/index.html b/docs/0.41.1/reference/sql-events/fastify-plugin/index.html new file mode 100644 index 00000000000..09b44918a55 --- /dev/null +++ b/docs/0.41.1/reference/sql-events/fastify-plugin/index.html @@ -0,0 +1,19 @@ + + + + + +Fastify Plugin | Platformatic Open Source Software + + + + + +
+
Version: 0.41.1

Fastify Plugin

The @platformatic/sql-events package exports a Fastify plugin that can be used out-of the box in a server application. +It requires that @platformatic/sql-mapper is registered before it.

The plugin has the following options:

The plugin adds the following properties to the app.platformatic object:

  • mq — an instance of mqemitter
  • subscribe(topics) — a method to create a node Readable +that will contain the events emitted by those topics.

Each entities of app.platformatic.entities will be augmented with two functions:

  • entity.getPublishTopic({ ctx, data, action })
  • entity.getSubscriptionTopic({ ctx, action })

Where ctx is the GraphQL Context, data is the object that will be emitted and action is either save or delete.

Usage

'use strict'

const Fastify = require('fastify')
const mapper = require('@platformatic/sql-mapper')
const events = require('@platformatic/sql-events')

async function main() {
const app = Fastify({
logger: {
level: 'info'
}
})
app.register(mapper.plugin, {
connectionString: 'postgres://postgres:postgres@127.0.0.1/postgres'
})

app.register(events)

// setup your routes


await app.listen({ port: 3333 })
}

main()
+ + + + \ No newline at end of file diff --git a/docs/0.41.1/reference/sql-events/introduction/index.html b/docs/0.41.1/reference/sql-events/introduction/index.html new file mode 100644 index 00000000000..2072155de98 --- /dev/null +++ b/docs/0.41.1/reference/sql-events/introduction/index.html @@ -0,0 +1,21 @@ + + + + + +Introduction to the sql-events module | Platformatic Open Source Software + + + + + +
+
Version: 0.41.1

Introduction to the sql-events module

The Platformatic DB sql-events uses mqemitter to publish events when entities are saved and deleted.

These events are useful to distribute updates to clients, e.g. via WebSocket, Server-Sent Events, or GraphQL Subscriptions. +When subscribing and using a multi-process system with a broker like Redis, a subscribed topic will receive the data from all +the other processes.

They are not the right choice for executing some code whenever an entity is created, modified or deleted, in that case +use @platformatic/sql-mapper hooks.

Install

You can use it together with @platformatic/sql-mapper.

npm i @platformatic/sql-mapper @platformatic/sql-events

Usage

const { connect } = require('@platformatic/sql-mapper')
const { setupEmitter } = require('@platformatic/sql-events')
const { pino } = require('pino')

const log = pino()

async function onDatabaseLoad (db, sql) {
await db.query(sql`CREATE TABLE pages (
id SERIAL PRIMARY KEY,
title VARCHAR(255) NOT NULL
);`)
}
const connectionString = 'postgres://postgres:postgres@localhost:5432/postgres'
const mapper = await connect({
connectionString,
log,
onDatabaseLoad,
ignore: {},
hooks: {
Page: {
find: async function(_find, opts) {
console.log('hook called');
return await _find(opts)
}
}
}
})

setupEmitter({ mapper, log })

const pageEntity = mapper.entities.page

const queue = await mapper.subscribe([
pageEntity.getSubscriptionTopic({ action: 'save' }),
pageEntity.getSubscriptionTopic({ action: 'delete' })
])

const page = await pageEntity.save({
input: { title: 'fourth page' }
})

const page2 = await pageEntity.save({
input: {
id: page.id,
title: 'fifth page'
}
})

await pageEntity.delete({
where: {
id: {
eq: page.id
}
},
fields: ['id', 'title']
})

for await (const ev of queue) {
console.log(ev)
if (expected.length === 0) {
break
}
}

process.exit(0)

API

The setupEmitter function has the following options:

The setupEmitter functions adds the following properties to the mapper object:

  • mq — an instance of mqemitter
  • subscribe(topics) — a method to create a node Readable +that will contain the events emitted by those topics.

Each entities of app.platformatic.entities will be augmented with two functions:

  • entity.getPublishTopic({ ctx, data, action })
  • entity.getSubscriptionTopic({ ctx, action })

Where ctx is the GraphQL Context, data is the object that will be emitted and action is either save or delete.

+ + + + \ No newline at end of file diff --git a/docs/0.41.1/reference/sql-graphql/ignore/index.html b/docs/0.41.1/reference/sql-graphql/ignore/index.html new file mode 100644 index 00000000000..71cd679ac16 --- /dev/null +++ b/docs/0.41.1/reference/sql-graphql/ignore/index.html @@ -0,0 +1,17 @@ + + + + + +Ignoring types and fields | Platformatic Open Source Software + + + + + + + + + + \ No newline at end of file diff --git a/docs/0.41.1/reference/sql-graphql/introduction/index.html b/docs/0.41.1/reference/sql-graphql/introduction/index.html new file mode 100644 index 00000000000..30cb48b1dbf --- /dev/null +++ b/docs/0.41.1/reference/sql-graphql/introduction/index.html @@ -0,0 +1,21 @@ + + + + + +Introduction to the GraphQL API | Platformatic Open Source Software + + + + + +
+
Version: 0.41.1

Introduction to the GraphQL API

The Platformatic DB GraphQL plugin starts a GraphQL server and makes it available +via a /graphql endpoint. This endpoint is automatically ready to run queries and +mutations against your entities. This functionality is powered by +Mercurius.

GraphiQL

The GraphiQL web UI is integrated into +Platformatic DB. To enable it you can pass an option to the sql-graphql plugin:

app.register(graphqlPlugin, { graphiql: true })

The GraphiQL interface is made available under the /graphiql path.

+ + + + \ No newline at end of file diff --git a/docs/0.41.1/reference/sql-graphql/many-to-many/index.html b/docs/0.41.1/reference/sql-graphql/many-to-many/index.html new file mode 100644 index 00000000000..5b1a83a8f65 --- /dev/null +++ b/docs/0.41.1/reference/sql-graphql/many-to-many/index.html @@ -0,0 +1,20 @@ + + + + + +Many To Many Relationship | Platformatic Open Source Software + + + + + +
+
Version: 0.41.1

Many To Many Relationship

Many-to-Many relationship lets you relate each row in one table to many rows in +another table and vice versa.

Many-to-many relationships are implemented in SQL via a "join table", a table whose primary key +is composed of the identifiers of the two parts of the many-to-many relationship.

Platformatic DB fully supports many-to-many relationships on all supported databases.

Example

Consider the following schema (SQLite):

CREATE TABLE pages (
id INTEGER PRIMARY KEY,
the_title VARCHAR(42)
);

CREATE TABLE users (
id INTEGER PRIMARY KEY,
username VARCHAR(255) NOT NULL
);

CREATE TABLE editors (
page_id INTEGER NOT NULL,
user_id INTEGER NOT NULL,
role VARCHAR(255) NOT NULL,
CONSTRAINT fk_editor_pages FOREIGN KEY (page_id) REFERENCES pages(id),
CONSTRAINT fk_editor_users FOREIGN KEY (user_id) REFERENCES users(id),
PRIMARY KEY (page_id, user_id)
);

The table editors is a "join table" between users and pages. +Given this schema, you could issue queries like:

query {
editors(orderBy: { field: role, direction: DESC }) {
user {
id
username
}
page {
id
theTitle
}
role
}
}

Mutation works exactly the same as before:

mutation {
saveEditor(input: { userId: "1", pageId: "1", role: "captain" }) {
user {
id
username
}
page {
id
theTitle
}
role
}
}
+ + + + \ No newline at end of file diff --git a/docs/0.41.1/reference/sql-graphql/mutations/index.html b/docs/0.41.1/reference/sql-graphql/mutations/index.html new file mode 100644 index 00000000000..0e43b4b205a --- /dev/null +++ b/docs/0.41.1/reference/sql-graphql/mutations/index.html @@ -0,0 +1,20 @@ + + + + + +Mutations | Platformatic Open Source Software + + + + + +
+
Version: 0.41.1

Mutations

When the GraphQL plugin is loaded, some mutations are automatically added to +the GraphQL schema.

save[ENTITY]

Saves a new entity to the database or updates an existing entity. +This actually behaves as an upsert, allowing both behaviours depending on the presence of the primary key field.

Example

'use strict'

const Fastify = require('fastify')
const graphqlPlugin = require('@platformatic/sql-graphql')
const sqlMapper = require('@platformatic/sql-mapper')

async function main() {
const app = Fastify({
logger: {
level: 'info'
}
})
app.register(sqlMapper, {
connectionString: 'postgres://postgres:postgres@127.0.0.1/postgres',
log: logger,
})
app.register(graphqlPlugin, {
graphiql: true
})
const res = await app.inject({
method: 'POST',
url: '/graphql',
body: {
query: `
mutation {
savePage(input: { id: 3 title: "Platformatic is cool!" }) {
id
title
}
}
`
}
})
const result = await res.json()
console.log(result.data) // { savePage: { id: '3', title: 'Platformatic is cool!' } }
await app.close()
}

main()

insert[ENTITY]

Inserts a new entity in the database.

Example

'use strict'

const Fastify = require('fastify')
const graphqlPlugin = require('@platformatic/sql-graphql')
const sqlMapper = require('@platformatic/sql-mapper')

async function main() {
const app = Fastify({
logger: {
level: 'info'
}
})
app.register(sqlMapper, {
connectionString: 'postgres://postgres:postgres@127.0.0.1/postgres',
log: logger,
})
app.register(graphqlPlugin, {
graphiql: true
})
const res = await app.inject({
method: 'POST',
url: '/graphql',
body: {
query: `
mutation {
savePage(input: { title: "Platformatic is cool!" }) {
id
title
}
}
`
}
})
const result = await res.json()
console.log(result.data) // { savePage: { id: '4', title: 'Platformatic is cool!' } }
await app.close()
}

main()

delete[ENTITIES]

Deletes one or more entities from the database, based on the where clause +passed as an input to the mutation.

Example

'use strict'

const Fastify = require('fastify')
const graphqlPlugin = require('@platformatic/sql-graphql')
const sqlMapper = require('@platformatic/sql-mapper')

async function main() {
const app = Fastify({
logger: {
level: 'info'
}
})
app.register(sqlMapper, {
connectionString: 'postgres://postgres:postgres@127.0.0.1/postgres',
log: logger,
})
app.register(graphqlPlugin, {
graphiql: true
})
const res = await app.inject({
method: 'POST',
url: '/graphql',
body: {
query: `
mutation {
deletePages(where: { id: { eq: "3" } }) {
id
title
}
}
`
}
})
const result = await res.json()
console.log(result.data) // { deletePages: [ { id: '3', title: 'Platformatic is cool!' } ] }
await app.close()
}

main()
+ + + + \ No newline at end of file diff --git a/docs/0.41.1/reference/sql-graphql/queries/index.html b/docs/0.41.1/reference/sql-graphql/queries/index.html new file mode 100644 index 00000000000..24b1a378f40 --- /dev/null +++ b/docs/0.41.1/reference/sql-graphql/queries/index.html @@ -0,0 +1,21 @@ + + + + + +Queries | Platformatic Open Source Software + + + + + +
+
Version: 0.41.1

Queries

A GraphQL query is automatically added to the GraphQL schema for each database +table, along with a complete mapping for all table fields.

Example

'use strict'

const Fastify = require('fastify')
const graphqlPlugin = require('@platformatic/sql-graphql')
const sqlMapper = require('@platformatic/sql-mapper')
async function main() {
const app = Fastify({
logger: {
level: 'info'
}
})
app.register(sqlMapper, {
connectionString: 'postgres://postgres:postgres@127.0.0.1/postgres'
})
app.register(graphqlPlugin, {
graphiql: true
})
const res = await app.inject({
method: 'POST',
url: '/graphql',
body: {
query: `
query{
pages{
id,
title
}
}
`
}
})
const result = await res.json()
console.log(result.data)
await app.close()
}
main()

Advanced Queries

The following additional queries are added to the GraphQL schema for each entity:

get[ENTITY]by[PRIMARY_KEY]

If you have a table pages with the field id as the primary key, you can run +a query called getPageById.

Example

...
const res = await app.inject({
method: 'POST',
url: '/graphql',
body: {
query: `
query{
getPageById(id: 3) {
id,
title
}
}
`
}
})
const result = await res.json()
console.log(result.data) // { getPageById: { id: '3', title: 'A fiction' } }

count[ENTITIES]

...
const res = await app.inject({
method: 'POST',
url: '/graphql',
body: {
query: `
query {
countPages {
total
}
}
`
}
})
const result = await res.json()
console.log(result.data) // { countPages: { total: 17 } }

Pagination

Platformatic DB supports result pagination through the input parameters: limit and offset

Example

{
users(limit:5, offset: 10) {
name
}
}

It returns 5 users starting from position 10.

Limit

By default a limit value (10) is applied to each request.

Clients can override this behavior by passing a value. +In this case the server validates the input and an error is returned if it exceeds the max accepted value (100).

Limit's values can be customized through configuration:

{
...
"db": {
...
"limit": {
"default": 50,
"max": 1000
}
}
}

Limit only accepts values >= 0. Otherwise an error is returned.

Offset

By default offset is not applied to the request. +Clients can override this behavior by passing a value.

Offset only accepts values >= 0. Otherwise an error is returned.

+ + + + \ No newline at end of file diff --git a/docs/0.41.1/reference/sql-graphql/subscriptions/index.html b/docs/0.41.1/reference/sql-graphql/subscriptions/index.html new file mode 100644 index 00000000000..5d9a9115917 --- /dev/null +++ b/docs/0.41.1/reference/sql-graphql/subscriptions/index.html @@ -0,0 +1,19 @@ + + + + + +Subscription | Platformatic Open Source Software + + + + + +
+
Version: 0.41.1

Subscription

When the GraphQL plugin is loaded, some subscriptions are automatically added to +the GraphQL schema if the @platformatic/sql-events plugin has been previously registered.

It's possible to avoid creating the subscriptions for a given entity by adding the subscriptionIgnore config, +like so: subscriptionIgnore: ['page'].

[ENTITY]Saved

Published whenever an entity is saved, e.g. when the mutation insert[ENTITY] or save[ENTITY] are called.

[ENTITY]Deleted

Published whenever an entity is deleted, e.g. when the mutation delete[ENTITY] is called.

+ + + + \ No newline at end of file diff --git a/docs/0.41.1/reference/sql-mapper/entities/api/index.html b/docs/0.41.1/reference/sql-mapper/entities/api/index.html new file mode 100644 index 00000000000..c0a1e25ddef --- /dev/null +++ b/docs/0.41.1/reference/sql-mapper/entities/api/index.html @@ -0,0 +1,18 @@ + + + + + +API | Platformatic Open Source Software + + + + + +
+
Version: 0.41.1

API

A set of operation methods are available on each entity:

Returned fields

The entity operation methods accept a fields option that can specify an array of field names to be returned. If not specified, all fields will be returned.

Where clause

The entity operation methods accept a where option to allow limiting of the database rows that will be affected by the operation.

The where object's key is the field you want to check, the value is a key/value map where the key is an operator (see the table below) and the value is the value you want to run the operator against.

Platformatic operatorSQL operator
eq'='
in'IN'
nin'NOT IN'
neq'<>'
gt'>'
gte'>='
lt'<'
lte'<='
like'LIKE'

Examples

Selects row with id = 1

{
...
"where": {
id: {
eq: 1
}
}
}

Select all rows with id less than 100

{
...
"where": {
id: {
lt: 100
}
}
}

Select all rows with id 1, 3, 5 or 7

{
...
"where": {
id: {
in: [1, 3, 5, 7]
}
}
}

Where clause operations are by default combined with the AND operator. To combine them with the OR operator, use the or key.

Select all rows with id 1 or 3

{
...
"where": {
or: [
{
id: {
eq: 1
}
},
{
id: {
eq: 3
}
}
]
}
}

Select all rows with id 1 or 3 and title like 'foo%'

{
...
"where": {
or: [
{
id: {
eq: 1
}
},
{
id: {
eq: 3
}
}
],
title: {
like: 'foo%'
}
}
}

Reference

find

Retrieve data for an entity from the database.

Options

NameTypeDescription
fieldsArray of stringList of fields to be returned for each object
whereObjectWhere clause 🔗
orderByArray of ObjectObject like { field: 'counter', direction: 'ASC' }
limitNumberLimits the number of returned elements
offsetNumberThe offset to start looking for rows from

Usage

'use strict'

const { connect } = require('@platformatic/sql-mapper')
const { pino } = require('pino')
const pretty = require('pino-pretty')
const logger = pino(pretty())

async function main() {
const pgConnectionString = 'postgres://postgres:postgres@127.0.0.1/postgres'
const mapper = await connect({
connectionString: pgConnectionString,
log: logger,
})
const res = await mapper.entities.page.find({
fields: ['id', 'title',],
where: {
id: {
lt: 10
}
},
})
logger.info(res)
await mapper.db.dispose()
}
main()

count

Same as find, but only count entities.

Options

NameTypeDescription
whereObjectWhere clause 🔗

Usage

'use strict'

const { connect } = require('@platformatic/sql-mapper')
const { pino } = require('pino')
const pretty = require('pino-pretty')
const logger = pino(pretty())

async function main() {
const pgConnectionString = 'postgres://postgres:postgres@127.0.0.1/postgres'
const mapper = await connect({
connectionString: pgConnectionString,
log: logger,
})
const res = await mapper.entities.page.count({
where: {
id: {
lt: 10
}
},
})
logger.info(res)
await mapper.db.dispose()
}
main()

insert

Insert one or more entity rows in the database.

Options

NameTypeDescription
fieldsArray of stringList of fields to be returned for each object
inputsArray of ObjectEach object is a new row

Usage

'use strict'

const { connect } = require('@platformatic/sql-mapper')
const { pino } = require('pino')
const pretty = require('pino-pretty')
const logger = pino(pretty())

async function main() {
const pgConnectionString = 'postgres://postgres:postgres@127.0.0.1/postgres'
const mapper = await connect({
connectionString: pgConnectionString,
log: logger,
})
const res = await mapper.entities.page.insert({
fields: ['id', 'title' ],
inputs: [
{ title: 'Foobar' },
{ title: 'FizzBuzz' }
],
})
logger.info(res)
/**
0: {
"id": "16",
"title": "Foobar"
}
1: {
"id": "17",
"title": "FizzBuzz"
}
*/
await mapper.db.dispose()
}
main()

save

Create a new entity row in the database or update an existing one.

To update an existing entity, the id field (or equivalent primary key) must be included in the input object. +save actually behaves as an upsert, allowing both behaviours depending on the presence of the primary key field.

Options

NameTypeDescription
fieldsArray of stringList of fields to be returned for each object
inputObjectThe single row to create/update

Usage

'use strict'
const { connect } = require('@platformatic/sql-mapper')
const { pino } = require('pino')
const pretty = require('pino-pretty')
const logger = pino(pretty())

async function main() {
const connectionString = 'postgres://postgres:postgres@127.0.0.1/postgres'
const mapper = await connect({
connectionString: connectionString,
log: logger,
})
const res = await mapper.entities.page.save({
fields: ['id', 'title' ],
input: { id: 1, title: 'FizzBuzz' },
})
logger.info(res)
await mapper.db.dispose()
}
main()

delete

Delete one or more entity rows from the database, depending on the where option. Returns the data for all deleted objects.

Options

NameTypeDescription
fieldsArray of stringList of fields to be returned for each object
whereObjectWhere clause 🔗

Usage

'use strict'
const { connect } = require('@platformatic/sql-mapper')
const { pino } = require('pino')
const pretty = require('pino-pretty')
const logger = pino(pretty())

async function main() {
const connectionString = 'postgres://postgres:postgres@127.0.0.1/postgres'
const mapper = await connect({
connectionString: connectionString,
log: logger,
})
const res = await mapper.entities.page.delete({
fields: ['id', 'title',],
where: {
id: {
lt: 4
}
},
})
logger.info(res)
await mapper.db.dispose()
}
main()

updateMany

Update one or more entity rows from the database, depending on the where option. Returns the data for all updated objects.

Options

NameTypeDescription
whereObjectWhere clause 🔗
inputObjectThe new values that you want to update
fieldsArray of stringList of fields to be returned for each object

Usage

'use strict'
const { connect } = require('@platformatic/sql-mapper')
const { pino } = require('pino')
const pretty = require('pino-pretty')
const logger = pino(pretty())

async function main() {
const connectionString = 'postgres://postgres:postgres@127.0.0.1/postgres'
const mapper = await connect({
connectionString: connectionString,
log: logger,
})
const res = await mapper.entities.page.updateMany({
fields: ['id', 'title',],
where: {
counter: {
gte: 30
}
},
input: {
title: 'Updated title'
}
})
logger.info(res)
await mapper.db.dispose()
}
main()

+ + + + \ No newline at end of file diff --git a/docs/0.41.1/reference/sql-mapper/entities/example/index.html b/docs/0.41.1/reference/sql-mapper/entities/example/index.html new file mode 100644 index 00000000000..45150944903 --- /dev/null +++ b/docs/0.41.1/reference/sql-mapper/entities/example/index.html @@ -0,0 +1,17 @@ + + + + + +Example | Platformatic Open Source Software + + + + + +
+
Version: 0.41.1

Example

Given this PostgreSQL SQL schema:

CREATE TABLE "categories" (
"id" int4 NOT NULL DEFAULT nextval('categories_id_seq'::regclass),
"name" varchar(255) NOT NULL,
PRIMARY KEY ("id")
);

CREATE TABLE "pages" (
"id" int4 NOT NULL DEFAULT nextval('pages_id_seq'::regclass),
"title" varchar(255) NOT NULL,
"category_id" int4,
"user_id" int4,
PRIMARY KEY ("id")
);

ALTER TABLE "pages" ADD FOREIGN KEY ("category_id") REFERENCES "categories"("id");

app.platformatic.entities will contain this mapping object:

{
"category": {
"name": "Category",
"singularName": "category",
"pluralName": "categories",
"primaryKey": "id",
"table": "categories",
"fields": {
"id": {
"sqlType": "int4",
"isNullable": false,
"primaryKey": true,
"camelcase": "id"
},
"name": {
"sqlType": "varchar",
"isNullable": false,
"camelcase": "name"
}
},
"camelCasedFields": {
"id": {
"sqlType": "int4",
"isNullable": false,
"primaryKey": true,
"camelcase": "id"
},
"name": {
"sqlType": "varchar",
"isNullable": false,
"camelcase": "name"
}
},
"relations": [],
"reverseRelationships": [
{
"sourceEntity": "Page",
"relation": {
"constraint_catalog": "postgres",
"constraint_schema": "public",
"constraint_name": "pages_category_id_fkey",
"table_catalog": "postgres",
"table_schema": "public",
"table_name": "pages",
"constraint_type": "FOREIGN KEY",
"is_deferrable": "NO",
"initially_deferred": "NO",
"enforced": "YES",
"column_name": "category_id",
"ordinal_position": 1,
"position_in_unique_constraint": 1,
"foreign_table_name": "categories",
"foreign_column_name": "id"
}
}
]
},
"page": {
"name": "Page",
"singularName": "page",
"pluralName": "pages",
"primaryKey": "id",
"table": "pages",
"fields": {
"id": {
"sqlType": "int4",
"isNullable": false,
"primaryKey": true,
"camelcase": "id"
},
"title": {
"sqlType": "varchar",
"isNullable": false,
"camelcase": "title"
},
"category_id": {
"sqlType": "int4",
"isNullable": true,
"foreignKey": true,
"camelcase": "categoryId"
},
"user_id": {
"sqlType": "int4",
"isNullable": true,
"camelcase": "userId"
}
},
"camelCasedFields": {
"id": {
"sqlType": "int4",
"isNullable": false,
"primaryKey": true,
"camelcase": "id"
},
"title": {
"sqlType": "varchar",
"isNullable": false,
"camelcase": "title"
},
"categoryId": {
"sqlType": "int4",
"isNullable": true,
"foreignKey": true,
"camelcase": "categoryId"
},
"userId": {
"sqlType": "int4",
"isNullable": true,
"camelcase": "userId"
}
},
"relations": [
{
"constraint_catalog": "postgres",
"constraint_schema": "public",
"constraint_name": "pages_category_id_fkey",
"table_catalog": "postgres",
"table_schema": "public",
"table_name": "pages",
"constraint_type": "FOREIGN KEY",
"is_deferrable": "NO",
"initially_deferred": "NO",
"enforced": "YES",
"column_name": "category_id",
"ordinal_position": 1,
"position_in_unique_constraint": 1,
"foreign_table_name": "categories",
"foreign_column_name": "id"
}
],
"reverseRelationships": []
}
}
+ + + + \ No newline at end of file diff --git a/docs/0.41.1/reference/sql-mapper/entities/fields/index.html b/docs/0.41.1/reference/sql-mapper/entities/fields/index.html new file mode 100644 index 00000000000..4420daf8ff6 --- /dev/null +++ b/docs/0.41.1/reference/sql-mapper/entities/fields/index.html @@ -0,0 +1,17 @@ + + + + + +Fields | Platformatic Open Source Software + + + + + +
+
Version: 0.41.1

Fields

When Platformatic DB inspects a database's schema, it creates an object for each table that contains a mapping of their fields.

These objects contain the following properties:

  • singularName: singular entity name, based on table name. Uses inflected under the hood.
  • pluralName: plural entity name (i.e 'pages')
  • primaryKey: the field which is identified as primary key.
  • table: original table name
  • fields: an object containing all fields details. Object key is the field name.
  • camelCasedFields: an object containing all fields details in camelcase. If you have a column named user_id you can access it using both userId or user_id

Fields detail

  • sqlType: The original field type. It may vary depending on the underlying DB Engine
  • isNullable: Whether the field can be null or not
  • primaryKey: Whether the field is the primary key or not
  • camelcase: The camelcased value of the field

Example

Given this SQL Schema (for PostgreSQL):

CREATE SEQUENCE IF NOT EXISTS pages_id_seq;
CREATE TABLE "public"."pages" (
"id" int4 NOT NULL DEFAULT nextval('pages_id_seq'::regclass),
"title" varchar,
"body_content" text,
"category_id" int4,
PRIMARY KEY ("id")
);

The resulting mapping object will be:

{
singularName: 'page',
pluralName: 'pages',
primaryKey: 'id',
table: 'pages',
fields: {
id: {
sqlType: 'int4',
isNullable: false,
primaryKey: true,
camelcase: 'id'
},
title: {
sqlType: 'varchar',
isNullable: true,
camelcase: 'title'
},
body_content: {
sqlType: 'text',
isNullable: true,
camelcase: 'bodyContent'
},
category_id: {
sqlType: 'int4',
isNullable: true,
foreignKey: true,
camelcase: 'categoryId'
}
}
camelCasedFields: {
id: {
sqlType: 'int4',
isNullable: false,
primaryKey: true,
camelcase: 'id'
},
title: {
sqlType: 'varchar',
isNullable: true,
camelcase: 'title'
},
bodyContent: {
sqlType: 'text',
isNullable: true,
camelcase: 'bodyContent'
},
categoryId: {
sqlType: 'int4',
isNullable: true,
foreignKey: true,
camelcase: 'categoryId'
}
},
relations: []
}
+ + + + \ No newline at end of file diff --git a/docs/0.41.1/reference/sql-mapper/entities/hooks/index.html b/docs/0.41.1/reference/sql-mapper/entities/hooks/index.html new file mode 100644 index 00000000000..f86cea3f2cc --- /dev/null +++ b/docs/0.41.1/reference/sql-mapper/entities/hooks/index.html @@ -0,0 +1,17 @@ + + + + + +Hooks | Platformatic Open Source Software + + + + + +
+
Version: 0.41.1

Hooks

Entity hooks are a way to wrap the API methods for an entity and add custom behaviour.

The Platformatic DB SQL Mapper provides an addEntityHooks(entityName, spec) function that can be used to add hooks for an entity.

How to use hooks

addEntityHooks accepts two arguments:

  1. A string representing the entity name (singularized), for example 'page'.
  2. A key/value object where the key is one of the API methods (find, insert, save, delete) and the value is a callback function. The callback will be called with the original API method and the options that were passed to that method. See the example below.

Usage

'use strict'
const { connect } = require('@platformatic/sql-mapper')
const { pino } = require('pino')
const pretty = require('pino-pretty')
const logger = pino(pretty())

async function main() {
const pgConnectionString = 'postgres://postgres:postgres@127.0.0.1/postgres'
const mapper = await connect({
connectionString: pgConnectionString,
log: logger,
})
mapper.addEntityHooks('page', {
find: async (originalFind, opts) => {
// Add a `foo` field with `bar` value to each row
const res = await originalFind(opts)
return res.map((row) => {
row.foo = 'bar'
return row
})
}
})
const res = await mapper.entities.page.find({
fields: ['id', 'title',],
where: {
id: {
lt: 10
}
},
})
logger.info(res)
/**
[
0: {
"id": "5",
"title": "Page 1",
"foo": "bar"
},
1: {
"id": "6",
"title": "Page 2",
"foo": "bar"
}
]
*/
await mapper.db.dispose()
}
main()

Multiple Hooks

Multiple hooks can be added for the same entity and API method, for example:

'use strict'
const { connect } = require('@platformatic/sql-mapper')
const { pino } = require('pino')
const pretty = require('pino-pretty')
const logger = pino(pretty())

async function main() {
const pgConnectionString = 'postgres://postgres:postgres@127.0.0.1/postgres'
const mapper = await connect({
connectionString: pgConnectionString,
log: logger,
})
mapper.addEntityHooks('page', {
find: async function firstHook(previousFunction, opts) {
// Add a `foo` field with `bar` value to each row
const res = await previousFunction(opts)
return res.map((row) => {
row.foo = 'bar'
return row
})
}
})
mapper.addEntityHooks('page', {
find: async function secondHook(previousFunction, opts) {
// Add a `bar` field with `baz` value to each row
const res = await previousFunction(opts)
return res.map((row) => {
row.bar = 'baz'
return row
})
}
})
const res = await mapper.entities.page.find({
fields: ['id', 'title',],
where: {
id: {
lt: 10
}
},
})
logger.info(res)
/**
[
0: {
"id": "5",
"title": "Page 1",
"foo": "bar",
"bar": "baz"
},
1: {
"id": "6",
"title": "Page 2",
"foo": "bar",
"bar": "baz"
}
]
*/
await mapper.db.dispose()
}
main()

Since hooks are wrappers, they are being called in reverse order, like the image below

Hooks Lifecycle

So even though we defined two hooks, the Database will be hit only once.

Query result will be processed by firstHook, which will pass the result to secondHook, which will, finally, send the processed result to the original .find({...}) function.

+ + + + \ No newline at end of file diff --git a/docs/0.41.1/reference/sql-mapper/entities/introduction/index.html b/docs/0.41.1/reference/sql-mapper/entities/introduction/index.html new file mode 100644 index 00000000000..fe3efa6ba28 --- /dev/null +++ b/docs/0.41.1/reference/sql-mapper/entities/introduction/index.html @@ -0,0 +1,17 @@ + + + + + +Introduction to Entities | Platformatic Open Source Software + + + + + +
+
Version: 0.41.1

Introduction to Entities

The primary goal of Platformatic DB is to read a database schema and generate REST and GraphQL endpoints that enable the execution of CRUD (Create/Retrieve/Update/Delete) operations against the database.

Platformatic DB includes a mapper that reads the schemas of database tables and then generates an entity object for each table.

Platformatic DB is a Fastify application. The Fastify instance object is decorated with the platformatic property, which exposes several APIs that handle the manipulation of data in the database.

Platformatic DB populates the app.platformatic.entities object with data found in database tables.

The keys on the entities object are singularized versions of the table names — for example users becomes user, categories becomes category — and the values are a set of associated metadata and functions.

+ + + + \ No newline at end of file diff --git a/docs/0.41.1/reference/sql-mapper/entities/relations/index.html b/docs/0.41.1/reference/sql-mapper/entities/relations/index.html new file mode 100644 index 00000000000..710b522cb58 --- /dev/null +++ b/docs/0.41.1/reference/sql-mapper/entities/relations/index.html @@ -0,0 +1,20 @@ + + + + + +Relations | Platformatic Open Source Software + + + + + +
+
Version: 0.41.1

Relations

When Platformatic DB is reading your database schema, it identifies relationships +between tables and stores metadata on them in the entity object's relations field. +This is achieved by querying the database's internal metadata.

Example

Given this PostgreSQL schema:

CREATE SEQUENCE IF NOT EXISTS categories_id_seq;

CREATE TABLE "categories" (
"id" int4 NOT NULL DEFAULT nextval('categories_id_seq'::regclass),
"name" varchar(255) NOT NULL,
PRIMARY KEY ("id")
);

CREATE SEQUENCE IF NOT EXISTS pages_id_seq;

CREATE TABLE "pages" (
"id" int4 NOT NULL DEFAULT nextval('pages_id_seq'::regclass),
"title" varchar(255) NOT NULL,
"body_content" text,
"category_id" int4,
PRIMARY KEY ("id")
);

ALTER TABLE "pages" ADD FOREIGN KEY ("category_id") REFERENCES "categories"("id");

When this code is run:

'use strict'
const { connect } = require('@platformatic/sql-mapper')
const { pino } = require('pino')
const pretty = require('pino-pretty')
const logger = pino(pretty())

async function main() {
const pgConnectionString = 'postgres://postgres:postgres@127.0.0.1/postgres'
const mapper = await connect({
connectionString: pgConnectionString,
log: logger,
})
const pageEntity = mapper.entities.page
console.log(pageEntity.relations)
await mapper.db.dispose()
}
main()

The output will be:

[
{
constraint_catalog: 'postgres',
constraint_schema: 'public',
constraint_name: 'pages_category_id_fkey',
table_catalog: 'postgres',
table_schema: 'public',
table_name: 'pages',
constraint_type: 'FOREIGN KEY',
is_deferrable: 'NO',
initially_deferred: 'NO',
enforced: 'YES',
column_name: 'category_id',
ordinal_position: 1,
position_in_unique_constraint: 1,
foreign_table_name: 'categories',
foreign_column_name: 'id'
}
]

As Platformatic DB supports multiple database engines, the contents of the +relations object will vary depending on the database being used.

The following relations fields are common to all database engines:

  • column_name — the column that stores the foreign key
  • foreign_table_name — the table hosting the related row
  • foreign_column_name — the column in foreign table that identifies the row
+ + + + \ No newline at end of file diff --git a/docs/0.41.1/reference/sql-mapper/entities/timestamps/index.html b/docs/0.41.1/reference/sql-mapper/entities/timestamps/index.html new file mode 100644 index 00000000000..f597ba21a83 --- /dev/null +++ b/docs/0.41.1/reference/sql-mapper/entities/timestamps/index.html @@ -0,0 +1,17 @@ + + + + + +Timestamps | Platformatic Open Source Software + + + + + +
+
Version: 0.41.1

Timestamps

Timestamps can be used to automatically set the created_at and updated_at fields on your entities.

Timestamps are enabled by default

Configuration

To disable timestamps, you need to set the autoTimestamp field to false in configuration file:

{
...
"db": {
"connectionString": "postgres://postgres:postgres@127.0.0.1/postgres",
"autoTimestamp": false
},
...
}

Customizing the field names

By default, the created_at and updated_at fields are used. You can customize the field names by setting the createdAt and updatedAt options in autoTimestamp field in configuration file:

{
...
"db": {
"connectionString": "postgres://postgres:postgres@127.0.0.1/postgres",
"autoTimestamp": {
"createdAt": "inserted_at",
"updatedAt": "updated_at"
}
...
}
+ + + + \ No newline at end of file diff --git a/docs/0.41.1/reference/sql-mapper/entities/transactions/index.html b/docs/0.41.1/reference/sql-mapper/entities/transactions/index.html new file mode 100644 index 00000000000..b899359c01d --- /dev/null +++ b/docs/0.41.1/reference/sql-mapper/entities/transactions/index.html @@ -0,0 +1,18 @@ + + + + + +Transactions | Platformatic Open Source Software + + + + + +
+
Version: 0.41.1

Transactions

Platformatic DB entities support transactions through the optional tx parameter. +If the tx parameter is provided, the entity will join the transaction, e.g.:


const { connect } = require('@platformatic/sql-mapper')
const logger = pino(pretty())

async function main() {
const pgConnectionString = 'postgres://postgres:postgres@127.0.0.1/postgres'
const { db, entities} = await connect({
connectionString: pgConnectionString,
log: logger,
})

const result = await db.tx(async tx => {
// these two operations will be executed in the same transaction
const authorResult = await entities.author.save({
fields: ['id', 'name'],
input: { name: 'test'},
tx
})
const res = await entities.page.save({
fields: ['title', 'authorId'],
input: { title: 'page title', authorId: authorResult.id },
tx
})
return res
})

}

Throwing an Error triggers a transaction rollback:

    try {
await db.tx(async tx => {
await entities.page.save({
input: { title: 'new page' },
fields: ['title'],
tx
})

// here we have `new page`
const findResult = await entities.page.find({ fields: ['title'], tx })

// (...)

// We force the rollback
throw new Error('rollback')
})
} catch (e) {
// rollback
}

// no 'new page' here...
const afterRollback = await entities.page.find({ fields: ['title'] })

+ + + + \ No newline at end of file diff --git a/docs/0.41.1/reference/sql-mapper/fastify-plugin/index.html b/docs/0.41.1/reference/sql-mapper/fastify-plugin/index.html new file mode 100644 index 00000000000..f1e30d9e3cf --- /dev/null +++ b/docs/0.41.1/reference/sql-mapper/fastify-plugin/index.html @@ -0,0 +1,17 @@ + + + + + +sql-mapper Fastify Plugin | Platformatic Open Source Software + + + + + +
+
Version: 0.41.1

sql-mapper Fastify Plugin

The @platformatic/sql-mapper package exports a Fastify plugin that can be used out of the box in a server application.

A connectionString option must be passed to connect to your database.

The plugin decorates the server with a platformatic object that has the following properties:

  • db — the DB wrapper object provided by @databases
  • sql — the SQL query mapper object provided by @databases
  • entities — all entity objects with their API methods
  • addEntityHooks — a function to add a hook to an entity API method.

The plugin also decorates the Fastify Request object with the following:

  • platformaticContext: an object with the following two properties:
    • app, the Fastify application of the given route
    • reply, the Fastify Reply instance matching that request

Usage

'use strict'

const Fastify = require('fastify')
const mapper = require('@platformatic/sql-mapper')

async function main() {
const app = Fastify({
logger: {
level: 'info'
}
})
app.register(mapper.plugin, {
connectionString: 'postgres://postgres:postgres@127.0.0.1/postgres'
})

app.get('/all-pages', async (req, reply) => {
// Optionally get the platformatic context.
// Passing this to all sql-mapper functions allow to apply
// authorization rules to the database queries (amongst other things).
const ctx = req.platformaticContext

// Will return all rows from 'pages' table
const res = await app.platformatic.entities.page.find({ ctx })
return res
})

await app.listen({ port: 3333 })
}

main()
+ + + + \ No newline at end of file diff --git a/docs/0.41.1/reference/sql-mapper/introduction/index.html b/docs/0.41.1/reference/sql-mapper/introduction/index.html new file mode 100644 index 00000000000..cde383370dd --- /dev/null +++ b/docs/0.41.1/reference/sql-mapper/introduction/index.html @@ -0,0 +1,19 @@ + + + + + +Introduction to @platformatic/sql-mapper | Platformatic Open Source Software + + + + + +
+
Version: 0.41.1

Introduction to @platformatic/sql-mapper

@platformatic/sql-mapper is the underlying utility that Platformatic DB uses to create useful utilities to +manipulate your SQL database using JavaScript.

This module is bundled with Platformatic DB via a fastify plugin +The rest of this guide shows how to use this module directly.

Install

npm i @platformatic/sql-mapper

API

connect(opts) : Promise

It will inspect a database schema and return an object containing:

  • db — A database abstraction layer from @databases
  • sql — The SQL builder from @databases
  • entities — An object containing a key for each table found in the schema, with basic CRUD operations. See Entity Reference for details.

The valid options are:

  • connectionString — The Database connection string
  • poolSize - Maximum number of connections in the connection pool. Defaults to 10.
  • log — A logger object (like Pino)
  • onDatabaseLoad — An async function that is called after the connection is established. It will receive db and sql as parameters.
  • ignore — Object used to ignore some tables from building entities. (i.e. { 'versions': true } will ignore versions table)
  • autoTimestamp — Generate timestamp automatically when inserting/updating records.
  • hooks — For each entity name (like Page) you can customize any of the entity API function. Your custom function will receive the original function as first parameter, and then all the other parameters passed to it.

createConnectionPool(opts) : Promise

It will inspect a database schema and return an object containing:

The valid options are:

  • connectionString — The Database connection string
  • poolSize - Maximum number of connections in the connection pool. Defaults to 10.
  • log — A logger object (like Pino)

This utility is useful if you just need to connect to the db without generating any entity.

Code samples

const { connect } = require('@platformatic/sql-mapper')
const { pino } = require('pino')

const logger = pino()

async function onDatabaseLoad (db, sql) {
await db.query(sql`CREATE TABLE pages (
id SERIAL PRIMARY KEY,
title VARCHAR(255) NOT NULL
);`)
}
const connectionString =
'postgres://postgres:postgres@localhost:5432/postgres'
const mapper = await connect({
connectionString,
log: logger,
onDatabaseLoad,
ignore: {},
hooks: {
Page: {
find: async function(_find, opts) {
console.log('hook called');
return await _find(opts)
}
}
}
})
const pageEntity = mapper.entities.page

await mapper.db.query(mapper.sql`SELECT * FROM pages`)
await mapper.db.find('option1', 'option2')
+ + + + \ No newline at end of file diff --git a/docs/0.41.1/reference/sql-openapi/api/index.html b/docs/0.41.1/reference/sql-openapi/api/index.html new file mode 100644 index 00000000000..fdcb4210d65 --- /dev/null +++ b/docs/0.41.1/reference/sql-openapi/api/index.html @@ -0,0 +1,22 @@ + + + + + +API | Platformatic Open Source Software + + + + + +
+
Version: 0.41.1

API

Each table is mapped to an entity named after table's name.

In the following reference we'll use some placeholders, but let's see an example

Example

Given this SQL executed against your database:

CREATE TABLE pages (
id SERIAL PRIMARY KEY,
title VARCHAR(255) NOT NULL,
body TEXT NOT NULL
);
  • [PLURAL_ENTITY_NAME] is pages
  • [SINGULAR_ENTITY_NAME] is page
  • [PRIMARY_KEY] is id
  • fields are id, title, body

GET and POST parameters

Some APIs need the GET method, where parameters must be defined in the URL, or the POST/PUT methods, where parameters can be defined in the HTTP request payload.

Fields

Every API can define a fields parameter, representing the entity fields you want to get back for each row of the table. If not specified all fields are returned.

The fields parameter is always sent in the query string, even for POST, PUT and DELETE requests, as a comma-separated value.

GET /[PLURAL_ENTITY_NAME]

Return all entities matching where clause

Where clause

You can define many WHERE clauses in REST API, each clause includes a field, an operator and a value.

The field is one of the fields found in the schema.

The operator follows this table:

Platformatic operatorSQL operator
eq'='
in'IN'
nin'NOT IN'
neq'<>'
gt'>'
gte'>='
lt'<'
lte'<='

The value is the value you want to compare the field to.

For GET requests all these clauses are specified in the query string using the format where.[FIELD].[OPERATOR]=[VALUE]

Example

If you want to get the title and the body of every page where id < 15 you can make an HTTP request like this:

$ curl -X 'GET' \
'http://localhost:3042/pages/?fields=body,title&where.id.lt=15' \
-H 'accept: application/json'

Where clause operations are by default combined with the AND operator. To create an OR condition use the where.or query param.

Each where.or query param can contain multiple conditions separated by a | (pipe).

The where.or conditions are similar to the where conditions, except that they don't have the where prefix.

Example

If you want to get the posts where counter = 10 OR counter >= 30 you can make an HTTP request like this:

$ curl -X 'GET' \
'http://localhost:3042/pages/?where.or=(counter.eq=10|counter.gte=30)' \
-H 'accept: application/json'

OrderBy clause

You can define the ordering of the returned rows within your REST API calls with the orderby clause using the following pattern:

?orderby.[field]=[asc | desc]

The field is one of the fields found in the schema. The value can be asc or desc.

Example

If you want to get the pages ordered alphabetically by their titles you can make an HTTP request like this:

$ curl -X 'GET' \
'http://localhost:3042/pages?orderby.title=asc' \
-H 'accept: application/json'

Total Count

If totalCount boolean is true in query, the GET returns the total number of elements in the X-Total-Count header ignoring limit and offset (if specified).

$ curl -v -X 'GET' \
'http://localhost:3042/pages/?limit=2&offset=0&totalCount=true' \
-H 'accept: application/json'

(...)
> HTTP/1.1 200 OK
> x-total-count: 18
(...)

[{"id":1,"title":"Movie1"},{"id":2,"title":"Movie2"}]%

POST [PLURAL_ENTITY_NAME]

Creates a new row in table. Expects fields to be sent in a JSON formatted request body.

Example

$ curl -X 'POST' \
'http://localhost:3042/pages/' \
-H 'accept: application/json' \
-H 'Content-Type: application/json' \
-d '{
"title": "Hello World",
"body": "Welcome to Platformatic!"
}'

{
"id": 1,
"title": "Hello World",
"body": "Welcome to Platformatic"
}

GET [PLURAL_ENTITY_NAME]/[PRIMARY_KEY]

Returns a single row, identified by PRIMARY_KEY.

Example

$ curl -X 'GET' 'http://localhost:3042/pages/1?fields=title,body'

{
"title": "Hello World",
"body": "Welcome to Platformatic"
}

POST [PLURAL_ENTITY_NAME]/[PRIMARY_KEY]

Updates a row identified by PRIMARY_KEY.

Example

$ curl -X 'POST' \
'http://localhost:3042/pages/1' \
-H 'accept: application/json' \
-H 'Content-Type: application/json' \
-d '{
"title": "Hello Platformatic!",
"body": "Welcome to Platformatic!"
}'

{
"id": 1,
"title": "Hello Platformatic!",
"body": "Welcome to Platformatic"
}

PUT [PLURAL_ENTITY_NAME]/[PRIMARY_KEY]

Same as POST [PLURAL_ENTITY_NAME]/[PRIMARY_KEY].

PUT [PLURAL_ENTITY_NAME]

Updates all entities matching where clause

Example

$ curl -X 'PUT' \
'http://localhost:3042/pages?where.id.in=1,2' \
-H 'accept: application/json' \
-H 'Content-Type: application/json' \
-d '{
"title": "Updated title!",
"body": "Updated body!"
}'

[{
"id": 1,
"title": "Updated title!",
"body": "Updated body!"
},{
"id": 2,
"title": "Updated title!",
"body": "Updated body!"
}]

DELETE [PLURAL_ENTITY_NAME]/[PRIMARY_KEY]

Deletes a row identified by the PRIMARY_KEY.

Example

$ curl -X 'DELETE' 'http://localhost:3042/pages/1?fields=title'

{
"title": "Hello Platformatic!"
}

Nested Relationships

Let's consider the following SQL:

CREATE TABLE IF NOT EXISTS movies (
movie_id INTEGER PRIMARY KEY,
title TEXT NOT NULL
);
CREATE TABLE IF NOT EXISTS quotes (
id INTEGER PRIMARY KEY,
quote TEXT NOT NULL,
movie_id INTEGER NOT NULL REFERENCES movies(movie_id)
);

And:

  • [P_PARENT_ENTITY] is movies
  • [S_PARENT_ENTITY] is movie
  • [P_CHILDREN_ENTITY] is quotes
  • [S_CHILDREN_ENTITY] is quote

In this case, more APIs are available:

GET [P_PARENT_ENTITY]/[PARENT_PRIMARY_KEY]/[P_CHILDREN_ENTITY]

Given a 1-to-many relationship, where a parent entity can have many children, you can query for the children directly.

$ curl -X 'GET' 'http://localhost:3042/movies/1/quotes?fields=quote'

[
{
"quote": "I'll be back"
},
{
"quote": "Hasta la vista, baby"
}
]

GET [P_CHILDREN_ENTITY]/[CHILDREN_PRIMARY_KEY]/[S_PARENT_ENTITY]

You can query for the parent directly, e.g.:

$ curl -X 'GET' 'http://localhost:3042/quotes/1/movie?fields=title'

{
"title": "Terminator"
}

Many-to-Many Relationships

A Many-to-Many relationship lets you relate each row in one table to many rows in another table and vice versa.

Many-to-many relationships are implemented in SQL via a "join table", a table whose primary key is composed of the identifiers of the two parts of the many-to-many relationship.

Platformatic DB fully supports many-to-many relationships on all supported databases.

Let's consider the following SQL:

CREATE TABLE pages (
id INTEGER PRIMARY KEY,
the_title VARCHAR(42)
);

CREATE TABLE users (
id INTEGER PRIMARY KEY,
username VARCHAR(255) NOT NULL
);

CREATE TABLE editors (
page_id INTEGER NOT NULL,
user_id INTEGER NOT NULL,
role VARCHAR(255) NOT NULL,
CONSTRAINT fk_editor_pages FOREIGN KEY (page_id) REFERENCES pages(id),
CONSTRAINT fk_editor_users FOREIGN KEY (user_id) REFERENCES users(id),
PRIMARY KEY (page_id, user_id)
);

And:

  • [P_ENTITY] is editors
  • [P_REL_1] is pages
  • [S_REL_1] is page
  • [KEY_REL_1] is pages PRIMARY KEY: pages(id)
  • [P_REL_2] is users
  • [S_REL_2] is user
  • [KEY_REL_2] is users PRIMARY KEY: users(id)

In this case, these are the APIs available for the join table:

GET [P_ENTITY]/[S_REL_1]/[KEY_REL_1]/[S_REL_2]/[KEY_REL_2]

This returns the entity in the "join table", e.g. GET /editors/page/1/user/1.

POST [P_ENTITY]/[S_REL_1]/[KEY_REL_1]/[S_REL_2]/[KEY_REL_2]

Creates a new entity in the "join table", e.g. POST /editors/page/1/user/1.

PUT [P_ENTITY]/[S_REL_1]/[KEY_REL_1]/[S_REL_2]/[KEY_REL_2]

Updates an entity in the "join table", e.g. PUT /editors/page/1/user/1.

DELETE [P_ENTITY]/[S_REL_1]/[KEY_REL_1]/[S_REL_2]/[KEY_REL_2]

Delete the entity in the "join table", e.g. DELETE /editors/page/1/user/1.

GET /[P_ENTITY]

See the above.

Offset only accepts values >= 0. Otherwise an error is returned.

Pagination

Platformatic DB supports pagination of results through two input parameters: limit and offset

Example

$ curl -X 'GET' 'http://localhost:3042/movies?limit=5&offset=10'

[
{
"title": "Star Wars",
"movie_id": 10
},
...
{
"title": "007",
"movie_id": 14
}
]

It returns 5 movies starting from position 10.

TotalCount functionality can be used in order to evaluate if there are more pages.

Limit

By default a limit value (10) is applied to each request.

Clients can override this behavior by passing a value. In this case the server validates the input and an error is returned if it exceeds the max accepted value (100).

Limit's values can be customized through configuration:

{
...
"db": {
...
"limit": {
"default": 50,
"max": 1000
}
}
}

Limit only accepts values >= 0. Otherwise an error is returned.

Offset

By default offset is not applied to the request. Clients can override this behavior by passing a value.

Offset only accepts values >= 0. Otherwise an error is returned.

+ + + + \ No newline at end of file diff --git a/docs/0.41.1/reference/sql-openapi/ignore/index.html b/docs/0.41.1/reference/sql-openapi/ignore/index.html new file mode 100644 index 00000000000..62682450397 --- /dev/null +++ b/docs/0.41.1/reference/sql-openapi/ignore/index.html @@ -0,0 +1,17 @@ + + + + + +Ignoring entities and fields | Platformatic Open Source Software + + + + + +
+
Version: 0.41.1

Ignoring entities and fields

@platformatic/sql-openapi allows to selectively ignore entities and fields.

To ignore entities:

app.register(require('@platformatic/sql-openapi'), {
ignore: {
categories: true
}
})

To ignore individual fields:

app.register(require('@platformatic/sql-openapi'), {
ignore: {
categories: {
name: true
}
}
})
+ + + + \ No newline at end of file diff --git a/docs/0.41.1/reference/sql-openapi/introduction/index.html b/docs/0.41.1/reference/sql-openapi/introduction/index.html new file mode 100644 index 00000000000..13edb860714 --- /dev/null +++ b/docs/0.41.1/reference/sql-openapi/introduction/index.html @@ -0,0 +1,17 @@ + + + + + +Introduction to the REST API | Platformatic Open Source Software + + + + + +
+
Version: 0.41.1

Introduction to the REST API

The Platformatic DB OpenAPI plugin automatically starts a REST API server (powered by Fastify) that provides CRUD (Create, Read, Update, Delete) functionality for each entity.

Configuration

In the config file, under the "db" section, the OpenAPI server is enabled by default. However, you can disable it by setting the openapi property to false.

Example

{
...
"db": {
"openapi": false
}
}

As Platformatic DB uses fastify-swagger under the hood, the "openapi" property can be an object that follows the OpenAPI Specification Object format.

This allows you to extend the output of the Swagger UI documentation.

+ + + + \ No newline at end of file diff --git a/docs/0.41.2/category/getting-started/index.html b/docs/0.41.2/category/getting-started/index.html new file mode 100644 index 00000000000..f602eba55e9 --- /dev/null +++ b/docs/0.41.2/category/getting-started/index.html @@ -0,0 +1,17 @@ + + + + + +Getting Started | Platformatic Open Source Software + + + + + + + + + + \ No newline at end of file diff --git a/docs/0.41.2/category/guides/index.html b/docs/0.41.2/category/guides/index.html new file mode 100644 index 00000000000..b7d5c3bca53 --- /dev/null +++ b/docs/0.41.2/category/guides/index.html @@ -0,0 +1,17 @@ + + + + + +Guides | Platformatic Open Source Software + + + + + +
+
Version: 0.41.2

Guides

+ + + + \ No newline at end of file diff --git a/docs/0.41.2/category/packages/index.html b/docs/0.41.2/category/packages/index.html new file mode 100644 index 00000000000..516a034c429 --- /dev/null +++ b/docs/0.41.2/category/packages/index.html @@ -0,0 +1,17 @@ + + + + + +Packages | Platformatic Open Source Software + + + + + + + + + + \ No newline at end of file diff --git a/docs/0.41.2/category/platformatic-cloud/index.html b/docs/0.41.2/category/platformatic-cloud/index.html new file mode 100644 index 00000000000..4cd3720f26d --- /dev/null +++ b/docs/0.41.2/category/platformatic-cloud/index.html @@ -0,0 +1,17 @@ + + + + + +Platformatic Cloud | Platformatic Open Source Software + + + + + + + + + + \ No newline at end of file diff --git a/docs/0.41.2/category/reference/index.html b/docs/0.41.2/category/reference/index.html new file mode 100644 index 00000000000..2258404fd2b --- /dev/null +++ b/docs/0.41.2/category/reference/index.html @@ -0,0 +1,17 @@ + + + + + +Reference | Platformatic Open Source Software + + + + + + + + + + \ No newline at end of file diff --git a/docs/0.41.2/contributing/documentation-style-guide/index.html b/docs/0.41.2/contributing/documentation-style-guide/index.html new file mode 100644 index 00000000000..740bd39cf72 --- /dev/null +++ b/docs/0.41.2/contributing/documentation-style-guide/index.html @@ -0,0 +1,74 @@ + + + + + +Documentation Style Guide | Platformatic Open Source Software + + + + + +
+
Version: 0.41.2

Documentation Style Guide

Welcome to the Platformatic Documentation Style Guide. This guide is here to provide you with a conventional writing style for users writing developer documentation on our Open Source framework. Each topic is precise and well explained to help you write documentation users can easily understand and implement.

Who is This Guide For?

This guide is for anyone who loves to build with Platformatic or wants to contribute +to our documentation. You do not need to be an expert in writing technical +documentation. This guide is here to help you.

Visit CONTRIBUTING.md +file on GitHub to join our Open Source folks.

Before you Write

You should have a basic understanding of:

  • JavaScript
  • Node.js
  • Git
  • GitHub
  • Markdown
  • HTTP
  • NPM

Consider Your Audience

Before you start writing, think about your audience. In this case, your audience +should already know HTTP, JavaScript, NPM, and Node.js. It is necessary to keep +your readers in mind because they are the ones consuming your content. You want +to give as much useful information as possible. Consider the vital things they +need to know and how they can understand them. Use words and references that +readers can relate to easily. Ask for feedback from the community, it can help +you write better documentation that focuses on the user and what you want to +achieve.

Get Straight to the Point

Give your readers a clear and precise action to take. Start with what is most +important. This way, you can help them find what they need faster. Mostly, +readers tend to read the first content on a page, and many will not scroll +further.

Example

Less like this:

Colons are very important to register a parametric path. It lets +the framework know there is a new parameter created. You can place the colon +before the parameter name so the parametric path can be created.

More Like this:

To register a parametric path, put a colon before the parameter +name. Using a colon lets the framework know it is a parametric path and not a +static path.

Images and Video Should Enhance the Written Documentation

Images and video should only be added if they complement the written +documentation, for example to help the reader form a clearer mental model of a +concept or pattern.

Images can be directly embedded, but videos should be included by linking to an +external site, such as YouTube. You can add links by using +[Title](https://www.websitename.com) in the Markdown.

Avoid Plagiarism

Make sure you avoid copying other people's work. Keep it as original as +possible. You can learn from what they have done and reference where it is from +if you used a particular quote from their work.

Word Choice

There are a few things you need to use and avoid when writing your documentation +to improve readability for readers and make documentation neat, direct, and +clean.

When to use the Second Person "you" as the Pronoun

When writing articles or guides, your content should communicate directly to +readers in the second person ("you") addressed form. It is easier to give them +direct instruction on what to do on a particular topic. To see an example, visit +the Quick Start Guide.

Example

Less like this:

We can use the following plugins.

More like this:

You can use the following plugins.

According to Wikipedia, You is usually a second person pronoun. +Also, used to refer to an indeterminate person, as a more common alternative +to a very formal indefinite pronoun.

To recap, use "you" when writing articles or guides.

When to Avoid the Second Person "you" as the Pronoun

One of the main rules of formal writing such as reference documentation, or API +documentation, is to avoid the second person ("you") or directly addressing the +reader.

Example

Less like this:

You can use the following recommendation as an example.

More like this:

As an example, the following recommendations should be +referenced.

To view a live example, refer to the Decorators +reference document.

To recap, avoid "you" in reference documentation or API documentation.

Avoid Using Contractions

Contractions are the shortened version of written and spoken forms of a word, +i.e. using "don't" instead of "do not". Avoid contractions to provide a more +formal tone.

Avoid Using Condescending Terms

Condescending terms are words that include:

  • Just
  • Easy
  • Simply
  • Basically
  • Obviously

The reader may not find it easy to use Platformatic; avoid +words that make it sound simple, easy, offensive, or insensitive. Not everyone +who reads the documentation has the same level of understanding.

Starting With a Verb

Mostly start your description with a verb, which makes it simple and precise for +the reader to follow. Prefer using present tense because it is easier to read +and understand than the past or future tense.

Example

Less like this:

There is a need for Node.js to be installed before you can be +able to use Platformatic.

More like this:

Install Node.js to make use of Platformatic.

Grammatical Moods

Grammatical moods are a great way to express your writing. Avoid sounding too +bossy while making a direct statement. Know when to switch between indicative, +imperative, and subjunctive moods.

Indicative - Use when making a factual statement or question.

Example

Since there is no testing framework available, "Platformatic recommends ways +to write tests".

Imperative - Use when giving instructions, actions, commands, or when you +write your headings.

Example

Install dependencies before starting development.

Subjunctive - Use when making suggestions, hypotheses, or non-factual +statements.

Example

Reading the documentation on our website is recommended to get +comprehensive knowledge of the framework.

Use Active Voice Instead of Passive

Using active voice is a more compact and direct way of conveying your +documentation.

Example

Passive:

The node dependencies and packages are installed by npm.

Active:

npm installs packages and node dependencies.

Writing Style

Documentation Titles

When creating a new guide, API, or reference in the /docs/ directory, use short titles that best describe the topic of your documentation. Name your files in kebab-case and avoid raw or camelCase names. To learn more about kebab-case you can visit this medium article on Case Styles.

Examples:

hook-and-plugins.md

adding-test-plugins.md

removing-requests.md

Hyperlinks should have a clear title of what it references. Here is how your +hyperlink should look:

<!-- More like this -->

// Add clear & brief description
[Fastify Plugins] (https://www.fastify.io/docs/latest/Plugins/)

<!--Less like this -->

// incomplete description
[Fastify] (https://www.fastify.io/docs/latest/Plugins/)

// Adding title in link brackets
[](https://www.fastify.io/docs/latest/Plugins/ "fastify plugin")

// Empty title
[](https://www.fastify.io/docs/latest/Plugins/)

// Adding links localhost URLs instead of using code strings (``)
[http://localhost:3000/](http://localhost:3000/)

Include in your documentation as many essential references as possible, but +avoid having numerous links when writing to avoid distractions.

+ + + + \ No newline at end of file diff --git a/docs/0.41.2/contributing/index.html b/docs/0.41.2/contributing/index.html new file mode 100644 index 00000000000..6d8ef096030 --- /dev/null +++ b/docs/0.41.2/contributing/index.html @@ -0,0 +1,18 @@ + + + + + +Contributing | Platformatic Open Source Software + + + + + +
+
+ + + + \ No newline at end of file diff --git a/docs/0.41.2/getting-started/architecture/index.html b/docs/0.41.2/getting-started/architecture/index.html new file mode 100644 index 00000000000..6869494f19e --- /dev/null +++ b/docs/0.41.2/getting-started/architecture/index.html @@ -0,0 +1,25 @@ + + + + + +Architecture | Platformatic Open Source Software + + + + + +
+
Version: 0.41.2

Architecture

Platformatic is a collection of Open Source tools designed to eliminate friction +in backend development. The first of those tools is Platformatic DB, which is developed +as @platformatic/db.

Platformatic DB

Platformatic DB can expose a SQL database by dynamically mapping it to REST/OpenAPI +and GraphQL endpoints. It supports a limited subset of the SQL query language, but +also allows developers to add their own custom routes and resolvers.

Platformatic DB Architecture

Platformatic DB is composed of a few key libraries:

  1. @platformatic/sql-mapper - follows the Data Mapper pattern to build an API on top of a SQL database. Internally it uses the @databases project.
  2. @platformatic/sql-openapi - uses sql-mapper to create a series of REST routes and matching OpenAPI definitions. +Internally it uses @fastify/swagger.
  3. @platformatic/sql-graphql - uses sql-mapper to create a GraphQL endpoint and schema. sql-graphql also supports Federation. Internally it uses mercurius.

Platformatic DB allows you to load a Fastify plugin during server startup that contains your own application-specific code. +The plugin can add more routes or resolvers — these will automatically be shown in the OpenAPI and GraphQL schemas.

SQL database migrations are also supported. They're implemented internally with the postgrator library.

+ + + + \ No newline at end of file diff --git a/docs/0.41.2/getting-started/movie-quotes-app-tutorial/index.html b/docs/0.41.2/getting-started/movie-quotes-app-tutorial/index.html new file mode 100644 index 00000000000..5a01b2ac8f2 --- /dev/null +++ b/docs/0.41.2/getting-started/movie-quotes-app-tutorial/index.html @@ -0,0 +1,129 @@ + + + + + +Movie Quotes App Tutorial | Platformatic Open Source Software + + + + + +
+
Version: 0.41.2

Movie Quotes App Tutorial

This tutorial will help you learn how to build a full stack application on top +of Platformatic DB. We're going to build an application that allows us to +save our favourite movie quotes. We'll also be building in custom API functionality +that allows for some neat user interaction on our frontend.

You can find the complete code for the application that we're going to build +on GitHub.

note

We'll be building the frontend of our application with the Astro +framework, but the GraphQL API integration steps that we're going to cover can +be applied with most frontend frameworks.

What we're going to cover

In this tutorial we'll learn how to:

  • Create a Platformatic API
  • Apply database migrations
  • Create relationships between our API entities
  • Populate our database tables
  • Build a frontend application that integrates with our GraphQL API
  • Extend our API with custom functionality
  • Enable CORS on our Platformatic API

Prerequisites

To follow along with this tutorial you'll need to have these things installed:

You'll also need to have some experience with JavaScript, and be comfortable with +running commands in a terminal.

Build the backend

Create a Platformatic API

First, let's create our project directory:

mkdir -p tutorial-movie-quotes-app/apps/movie-quotes-api/

cd tutorial-movie-quotes-app/apps/movie-quotes-api/

Run this command in your terminal to start the Platformatic creator wizard:

npm create platformatic@latest

This interactive command-line tool will ask you some questions about how you'd +like to set up your new Platformatic project. For this guide, select these options:

- Which kind of project do you want to create?  => DB
- Where would you like to create your project? => quick-start
- Do you want to create default migrations? => Yes
- Do you want to create a plugin? => Yes
- Do you want to use TypeScript? => No
- Do you want to install dependencies? => Yes (this can take a while)
- Do you want to apply the migrations? => Yes
- Do you want to generate types? => Yes
- Do you want to create the github action to deploy this application to Platformatic Cloud dynamic workspace? => No
- Do you want to create the github action to deploy this application to Platformatic Cloud static workspace? => No

Once the wizard is complete, you'll have a Platformatic app project in the +folder quick-start, with example migration files and a plugin script.

info

Make sure you run the npm/yarn/pnpm install command manually if you don't ask the wizard to do it for you.

Define the database schema

Let's create a new directory to store our migration files:

mkdir migrations

Then we'll create a migration file named 001.do.sql in the migrations +directory:

CREATE TABLE quotes (
id INTEGER PRIMARY KEY,
quote TEXT NOT NULL,
said_by VARCHAR(255) NOT NULL,
created_at DATETIME DEFAULT CURRENT_TIMESTAMP
);

Now let's setup migrations in our Platformatic configuration +file, platformatic.db.json:

{
"$schema": "https://platformatic.dev/schemas/v0.23.2/db",
"server": {
"hostname": "{PLT_SERVER_HOSTNAME}",
"port": "{PORT}",
"logger": {
"level": "{PLT_SERVER_LOGGER_LEVEL}"
}
},
"db": {
"connectionString": "{DATABASE_URL}",
"graphql": true,
"openapi": true
},
"plugins": {
"paths": [
"plugin.js"
]
},
"types": {
"autogenerate": true
},
"migrations": {
"dir": "migrations",
"autoApply": true
}
}
info

Take a look at the Configuration reference +to see all the supported configuration settings.

Now we can start the Platformatic DB server:

npm run start

Our Platformatic DB server should start, and we'll see messages like these:

[11:26:48.772] INFO (15235): running 001.do.sql
[11:26:48.864] INFO (15235): server listening
url: "http://127.0.0.1:3042"

Let's open a new terminal and make a request to our server's REST API that +creates a new quote:

curl --request POST --header "Content-Type: application/json" \
-d "{ \"quote\": \"Toto, I've got a feeling we're not in Kansas anymore.\", \"saidBy\": \"Dorothy Gale\" }" \
http://localhost:3042/quotes

We should receive a response like this from the API:

{"id":1,"quote":"Toto, I've got a feeling we're not in Kansas anymore.","saidBy":"Dorothy Gale","createdAt":"1684167422600"}

Create an entity relationship

Now let's create a migration file named 002.do.sql in the migrations +directory:

CREATE TABLE movies (
id INTEGER PRIMARY KEY,
name TEXT NOT NULL UNIQUE
);

ALTER TABLE quotes ADD COLUMN movie_id INTEGER REFERENCES movies(id);

This SQL will create a new movies database table and also add a movie_id +column to the quotes table. This will allow us to store movie data in the +movies table and then reference them by ID in our quotes table.

Let's stop the Platformatic DB server with Ctrl + C, and then start it again:

npm run start

The new migration should be automatically applied and we'll see the log message +running 002.do.sql.

Our Platformatic DB server also provides a GraphQL API. Let's open up the GraphiQL +application in our web browser:

http://localhost:3042/graphiql

Now let's run this query with GraphiQL to add the movie for the quote that we +added earlier:

mutation {
saveMovie(input: { name: "The Wizard of Oz" }) {
id
}
}

We should receive a response like this from the API:

{
"data": {
"saveMovie": {
"id": "1"
}
}
}

Now we can update our quote to reference the movie:

mutation {
saveQuote(input: { id: 1, movieId: 1 }) {
id
quote
saidBy
createdAt
movie {
id
name
}
}
}

We should receive a response like this from the API:

{
"data": {
"saveQuote": {
"id": "1",
"quote": "Toto, I've got a feeling we're not in Kansas anymore.",
"saidBy": "Dorothy Gale",
"movie": {
"id": "1",
"name": "The Wizard of Oz"
}
}
}
}

Our Platformatic DB server has automatically identified the relationship +between our quotes and movies database tables. This allows us to make +GraphQL queries that retrieve quotes and their associated movies at the same +time. For example, to retrieve all quotes from our database we can run:

query {
quotes {
id
quote
saidBy
createdAt
movie {
id
name
}
}
}

To view the GraphQL schema that's generated for our API by Platformatic DB, +we can run this command in our terminal:

npx platformatic db schema graphql

The GraphQL schema shows all of the queries and mutations that we can run +against our GraphQL API, as well as the types of data that it expects as input.

Populate the database

Our movie quotes database is looking a little empty! We're going to create a +"seed" script to populate it with some data.

Let's create a new file named seed.js and copy and paste in this code:

'use strict'

const quotes = [
{
quote: "Toto, I've got a feeling we're not in Kansas anymore.",
saidBy: 'Dorothy Gale',
movie: 'The Wizard of Oz'
},
{
quote: "You're gonna need a bigger boat.",
saidBy: 'Martin Brody',
movie: 'Jaws'
},
{
quote: 'May the Force be with you.',
saidBy: 'Han Solo',
movie: 'Star Wars'
},
{
quote: 'I have always depended on the kindness of strangers.',
saidBy: 'Blanche DuBois',
movie: 'A Streetcar Named Desire'
}
]

module.exports = async function ({ entities, db, sql }) {
for (const values of quotes) {
const movie = await entities.movie.save({ input: { name: values.movie } })

console.log('Created movie:', movie)

const quote = {
quote: values.quote,
saidBy: values.saidBy,
movieId: movie.id
}

await entities.quote.save({ input: quote })

console.log('Created quote:', quote)
}
}
info

Take a look at the Seed a Database guide to learn more +about how database seeding works with Platformatic DB.

Let's stop our Platformatic DB server running and remove our SQLite database:

rm db.sqlite

Now let's create a fresh SQLite database by running our migrations:

npx platformatic db migrations apply

And then let's populate the quotes and movies tables with data using our +seed script:

npx platformatic db seed seed.js

Our database is full of data, but we don't have anywhere to display it. It's +time to start building our frontend!

Build the frontend

We're now going to use Astro to build our frontend +application. If you've not used it before, you might find it helpful +to read this overview +on how Astro components are structured.

tip

Astro provide some extensions and tools to help improve your +Editor Setup when building an +Astro application.

Create an Astro application

In the root tutorial-movie-quotes-app directory of our project, let's create a new directory for our frontend application:

mkdir -p apps/movie-quotes-frontend/

cd apps/movie-quotes-frontend/

And then we'll create a new Astro project:

npm create astro@latest -- --template basics

It will ask you some questions about how you'd like to set up +your new Astro project. For this guide, select these options:

Where should we create your new project?

   .
◼ tmpl Using basics as project template
✔ Template copied

Install dependencies? (it's buggy, we'll do it afterwards)

   No
◼ No problem! Remember to install dependencies after setup.

Do you plan to write TypeScript?

   No
◼ No worries! TypeScript is supported in Astro by default, but you are free to continue writing JavaScript instead.

Initialize a new git repository?

   No
◼ Sounds good! You can always run git init manually.

Liftoff confirmed. Explore your project!
Run npm run dev to start the dev server. CTRL+C to stop.
Add frameworks like react or tailwind using astro add.

Now we'll edit our Astro configuration file, astro.config.mjs and +copy and paste in this code:

import { defineConfig } from 'astro/config'

// https://astro.build/config
export default defineConfig({
output: 'server'
})

And we'll also edit our tsconfig.json file and add in this configuration:

{
"extends": "astro/tsconfigs/base",
"compilerOptions": {
"types": ["astro/client"]
}
}

Now we can start up the Astro development server with:

npm run dev

And then load up the frontend in our browser at http://localhost:3000

Now that everything is working, we'll remove all default *.astro files from the src/ directory, but we'll keep the directory structure. You can delete them now, or override them later.

Create a layout

In the src/layouts directory, let's create a new file named Layout.astro:

---
export interface Props {
title: string;
page?: string;
}
const { title, page } = Astro.props;
---

<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8" />
<meta name="viewport" content="width=device-width" />
<title>{title}</title>
</head>
<body>
<header>
<h1>🎬 Movie Quotes</h1>
</header>
<nav>
<a href="/">All quotes</a>
</nav>
<section>
<slot />
</section>
</body>
</html>

The code between the --- is known as the component script, and the +code after that is the component template. The component script will only run +on the server side when a web browser makes a request. The component template +is rendered server side and sent back as an HTML response to the web browser.

Now we'll update src/pages/index.astro to use this Layout component. +Let's replace the contents of src/pages/index.astro with this code:

---
import Layout from '../layouts/Layout.astro';
---

<Layout title="All quotes" page="listing">
<main>
<p>We'll list all the movie quotes here.</p>
</main>
</Layout>

Integrate the urql GraphQL client

We're now going to integrate the URQL +GraphQL client into our frontend application. This will allow us to run queries +and mutations against our Platformatic GraphQL API.

Let's first install @urql/core and +graphql as project dependencies:

npm install @urql/core graphql

Then let's create a new .env file and add this configuration:

PUBLIC_GRAPHQL_API_ENDPOINT=http://127.0.0.1:3042/graphql

Now we'll create a new directory:

mkdir src/lib

And then create a new file named src/lib/quotes-api.js. In that file we'll +create a new URQL client:

// src/lib/quotes-api.js

import { createClient, cacheExchange, fetchExchange } from '@urql/core';

const graphqlClient = createClient({
url: import.meta.env.PUBLIC_GRAPHQL_API_ENDPOINT,
requestPolicy: "network-only",
exchanges: [cacheExchange, fetchExchange]
});

We'll also add a thin wrapper around the client that does some basic error +handling for us:

// src/lib/quotes-api.js

async function graphqlClientWrapper(method, gqlQuery, queryVariables = {}) {
const queryResult = await graphqlClient[method](
gqlQuery,
queryVariables
).toPromise();

if (queryResult.error) {
console.error("GraphQL error:", queryResult.error);
}

return {
data: queryResult.data,
error: queryResult.error,
};
}

export const quotesApi = {
async query(gqlQuery, queryVariables = {}) {
return await graphqlClientWrapper("query", gqlQuery, queryVariables);
},
async mutation(gqlQuery, queryVariables = {}) {
return await graphqlClientWrapper("mutation", gqlQuery, queryVariables);
}
}

And lastly, we'll export gql from the @urql/core package, to make it +simpler for us to write GraphQL queries in our pages:

// src/lib/quotes-api.js

export { gql } from "@urql/core";

Stop the Astro dev server and then start it again so it picks up the .env +file:

npm run dev

Display all quotes

Let's display all the movie quotes in src/pages/index.astro.

First, we'll update the component script at the top and add in a query to +our GraphQL API for quotes:

---
import Layout from '../layouts/Layout.astro';
import { quotesApi, gql } from '../lib/quotes-api';

const { data } = await quotesApi.query(gql`
query {
quotes {
id
quote
saidBy
createdAt
movie {
id
name
}
}
}
`);

const quotes = data?.quotes || [];
---

Then we'll update the component template to display the quotes:

<Layout title="All quotes" page="listing">
<main>
{quotes.length > 0 ? quotes.map((quote) => (
<div>
<blockquote>
<p>{quote.quote}</p>
</blockquote>
<p>
{quote.saidBy}, {quote.movie?.name}
</p>
<div>
<span>Added {new Date(Number(quote.createdAt)).toUTCString()}</span>
</div>
</div>
)) : (
<p>No movie quotes have been added.</p>
)}
</main>
</Layout>

And just like that, we have all the movie quotes displaying on the page!

Integrate Tailwind for styling

Automatically add the @astrojs/tailwind integration:

npx astro add tailwind --yes

Add the Tailwind CSS Typography +and Forms plugins:

npm install --save-dev @tailwindcss/typography @tailwindcss/forms

Import the plugins in our Tailwind configuration file:

// tailwind.config.cjs

/** @type {import('tailwindcss').Config} */
module.exports = {
content: ['./src/**/*.{astro,html,js,jsx,md,mdx,svelte,ts,tsx,vue}'],
theme: {
extend: {}
},
plugins: [
require('@tailwindcss/forms'),
require('@tailwindcss/typography')
]
}

Stop the Astro dev server and then start it again so it picks up all the +configuration changes:

npm run dev

Style the listing page

To style our listing page, let's add CSS classes to the component template in +src/layouts/Layout.astro:

---
export interface Props {
title: string;
page?: string;
}

const { title, page } = Astro.props;

const navActiveClasses = "font-bold bg-yellow-400 no-underline";
---

<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8" />
<meta name="viewport" content="width=device-width" />
<title>{title}</title>
</head>
<body class="py-8">
<header class="prose mx-auto mb-6">
<h1>🎬 Movie Quotes</h1>
</header>
<nav class="prose mx-auto mb-6 border-y border-gray-200 flex">
<a href="/" class={`p-3 ${page === "listing" && navActiveClasses}`}>All quotes</a>
</nav>
<section class="prose mx-auto">
<slot />
</section>
</body>
</html>

Then let's add CSS classes to the component template in src/pages/index.astro:

<Layout title="All quotes">
<main>
{quotes.length > 0 ? quotes.map((quote) => (
<div class="border-b mb-6">
<blockquote class="text-2xl mb-0">
<p class="mb-4">{quote.quote}</p>
</blockquote>
<p class="text-xl mt-0 mb-8 text-gray-400">
{quote.saidBy}, {quote.movie?.name}
</p>
<div class="flex flex-col mb-6 text-gray-400">
<span class="text-gray-400 italic">Added {new Date(Number(quote.createdAt)).toUTCString()}</span>
</div>
</div>
)) : (
<p>No movie quotes have been added.</p>
)}
</main>
</Layout>

Our listing page is now looking much more user friendly!

Create an add quote page

We're going to create a form component that we can use for adding and editing +quotes.

First let's create a new component file, src/components/QuoteForm.astro:

---
export interface QuoteFormData {
id?: number;
quote?: string;
saidBy?: string;
movie?: string;
}

export interface Props {
action: string;
values?: QuoteFormData;
saveError?: boolean;
loadError?: boolean;
submitLabel: string;
}

const { action, values = {}, saveError, loadError, submitLabel } = Astro.props;
---

{saveError && <p class="text-lg bg-red-200 p-4">There was an error saving the quote. Please try again.</p>}
{loadError && <p class="text-lg bg-red-200 p-4">There was an error loading the quote. Please try again.</p>}

<form method="post" action={action} class="grid grid-cols-1 gap-6">
<label for="quote" class="block">
<span>Quote</span>
<textarea id="quote" name="quote" required="required" class="mt-1 w-full">{values.quote}</textarea>
</label>
<label for="said-by" class="block">
<span>Said by</span>
<input type="text" id="said-by" name="saidBy" required="required" value={values.saidBy} class="mt-1 w-full">
</label>
<label for="movie" class="block">
<span>Movie</span>
<input type="text" id="movie" name="movie" required="required" autocomplete="off" value={values.movie} class="form-input mt-1 w-full">
</label>
<input type="submit" value={submitLabel} disabled={loadError && "disabled"} class="bg-yellow-400 hover:bg-yellow-500 text-gray-900 round p-3" />
</form>

Create a new page file, src/pages/add.astro:

---
import Layout from '../layouts/Layout.astro';
import QuoteForm from '../components/QuoteForm.astro';
import type { QuoteFormData } from '../components/QuoteForm.astro';

let formData: QuoteFormData = {};
let saveError = false;
---

<Layout title="Add a movie quote" page="add">
<main>
<h2>Add a quote</h2>
<QuoteForm action="/add" values={formData} saveError={saveError} submitLabel="Add quote" />
</main>
</Layout>

And now let's add a link to this page in the layout navigation in src/layouts/Layout.astro:

<nav class="prose mx-auto mb-6 border-y border-gray-200 flex">
<a href="/" class={`p-3 ${page === "listing" && navActiveClasses}`}>All quotes</a>
<a href="/add" class={`p-3 ${page === "add" && navActiveClasses}`}>Add a quote</a>
</nav>

Send form data to the API

When a user submits the add quote form we want to send the form data to our API +so it can then save it to our database. Let's wire that up now.

First we're going to create a new file, src/lib/request-utils.js:

export function isPostRequest (request) {
return request.method === 'POST'
}

export async function getFormData (request) {
const formData = await request.formData()

return Object.fromEntries(formData.entries())
}

Then let's update the component script in src/pages/add.astro to use +these new request utility functions:

---
import Layout from '../layouts/Layout.astro';
import QuoteForm from '../components/QuoteForm.astro';
import type { QuoteFormData } from '../components/QuoteForm.astro';

import { isPostRequest, getFormData } from '../lib/request-utils';

let formData: QuoteFormData = {};
let saveError = false;

if (isPostRequest(Astro.request)) {
formData = await getFormData(Astro.request);
}
---

When we create a new quote entity record via our API, we need to include a +movieId field that references a movie entity record. This means that when a +user submits the add quote form we need to:

  • Check if a movie entity record already exists with that movie name
  • Return the movie id if it does exist
  • If it doesn't exist, create a new movie entity record and return the movie ID

Let's update the import statement at the top of src/lib/quotes-api.js

-import { createClient } from '@urql/core'
+import { createClient, gql } from '@urql/core'

And then add a new method that will return a movie ID for us:

async function getMovieId (movieName) {
movieName = movieName.trim()

let movieId = null

// Check if a movie already exists with the provided name.
const queryMoviesResult = await quotesApi.query(
gql`
query ($movieName: String!) {
movies(where: { name: { eq: $movieName } }) {
id
}
}
`,
{ movieName }
)

if (queryMoviesResult.error) {
return null
}

const movieExists = queryMoviesResult.data?.movies.length === 1
if (movieExists) {
movieId = queryMoviesResult.data.movies[0].id
} else {
// Create a new movie entity record.
const saveMovieResult = await quotesApi.mutation(
gql`
mutation ($movieName: String!) {
saveMovie(input: { name: $movieName }) {
id
}
}
`,
{ movieName }
)

if (saveMovieResult.error) {
return null
}

movieId = saveMovieResult.data?.saveMovie.id
}

return movieId
}

And let's export it too:

export const quotesApi = {
async query (gqlQuery, queryVariables = {}) {
return await graphqlClientWrapper('query', gqlQuery, queryVariables)
},
async mutation (gqlQuery, queryVariables = {}) {
return await graphqlClientWrapper('mutation', gqlQuery, queryVariables)
},
getMovieId
}

Now we can wire up the last parts in the src/pages/add.astro component +script:

---
import Layout from '../layouts/Layout.astro';
import QuoteForm from '../components/QuoteForm.astro';
import type { QuoteFormData } from '../components/QuoteForm.astro';

import { quotesApi, gql } from '../lib/quotes-api';
import { isPostRequest, getFormData } from '../lib/request-utils';

let formData: QuoteFormData = {};
let saveError = false;

if (isPostRequest(Astro.request)) {
formData = await getFormData(Astro.request);

const movieId = await quotesApi.getMovieId(formData.movie);

if (movieId) {
const quote = {
quote: formData.quote,
saidBy: formData.saidBy,
movieId,
};

const { error } = await quotesApi.mutation(gql`
mutation($quote: QuoteInput!) {
saveQuote(input: $quote) {
id
}
}
`, { quote });

if (!error) {
return Astro.redirect('/');
} else {
saveError = true;
}
} else {
saveError = true;
}
}

Add autosuggest for movies

We can create a better experience for our users by autosuggesting the movie name +when they're adding a new quote.

Let's open up src/components/QuoteForm.astro and import our API helper methods +in the component script:

import { quotesApi, gql } from '../lib/quotes-api.js';

Then let's add in a query to our GraphQL API for all movies:

const { data } = await quotesApi.query(gql`
query {
movies {
name
}
}
`);

const movies = data?.movies || [];

Now let's update the Movie field in the component template to use the array of movies that we've retrieved from the API:

<label for="movie" class="block">
<span>Movie</span>
<input list="movies" id="movie" name="movie" required="required" autocomplete="off" value={values.movie} class="form-input mt-1 w-full">
<datalist id="movies">
{movies.map(({ name }) => (
<option>{name}</option>
))}
</datalist>
</label>

Create an edit quote page

Let's create a new directory, src/pages/edit/:

mkdir src/pages/edit/

And inside of it, let's create a new page, [id].astro:

---
import Layout from '../../layouts/Layout.astro';
import QuoteForm, { QuoteFormData } from '../../components/QuoteForm.astro';

const id = Number(Astro.params.id);

let formValues: QuoteFormData = {};
let loadError = false;
let saveError = false;
---

<Layout title="Edit movie quote">
<main>
<h2>Edit quote</h2>
<QuoteForm action={`/edit/${id}`} values={formValues} saveError={saveError} loadError={loadError} submitLabel="Update quote" />
</main>
</Layout>

You'll see that we're using the same QuoteForm component that our add quote +page uses. Now we're going to wire up our edit page so that it can load an +existing quote from our API and save changes back to the API when the form is +submitted.

In the [id].astro component script, let's add some code to take care of these tasks:

---
import Layout from '../../layouts/Layout.astro';
import QuoteForm, { QuoteFormData } from '../../components/QuoteForm.astro';

import { quotesApi, gql } from '../../lib/quotes-api';
import { isPostRequest, getFormData } from '../../lib/request-utils';

const id = Number(Astro.params.id);

let formValues: QuoteFormData = {};
let loadError = false;
let saveError = false;

if (isPostRequest(Astro.request)) {
const formData = await getFormData(Astro.request);
formValues = formData;

const movieId = await quotesApi.getMovieId(formData.movie);

if (movieId) {
const quote = {
id,
quote: formData.quote,
saidBy: formData.saidBy,
movieId,
};

const { error } = await quotesApi.mutation(gql`
mutation($quote: QuoteInput!) {
saveQuote(input: $quote) {
id
}
}
`, { quote });

if (!error) {
return Astro.redirect('/');
} else {
saveError = true;
}
} else {
saveError = true;
}
} else {
const { data } = await quotesApi.query(gql`
query($id: ID!) {
getQuoteById(id: $id) {
id
quote
saidBy
movie {
id
name
}
}
}
`, { id });

if (data?.getQuoteById) {
formValues = {
...data.getQuoteById,
movie: data.getQuoteById.movie.name
};
} else {
loadError = true;
}
}
---

Load up http://localhost:3000/edit/1 in your +browser to test out the edit quote page.

Now we're going to add edit links to the quotes listing page. Let's start by +creating a new component src/components/QuoteActionEdit.astro:

---
export interface Props {
id: number;
}

const { id } = Astro.props;
---
<a href={`/edit/${id}`} class="flex items-center mr-5 text-gray-400 hover:text-yellow-600 underline decoration-yellow-600 decoration-2 underline-offset-4">
<svg class="w-6 h-6 mr-1" xmlns="http://www.w3.org/2000/svg" viewBox="0 0 24 24" fill="currentColor" class="w-6 h-6">
<path d="M21.731 2.269a2.625 2.625 0 00-3.712 0l-1.157 1.157 3.712 3.712 1.157-1.157a2.625 2.625 0 000-3.712zM19.513 8.199l-3.712-3.712-8.4 8.4a5.25 5.25 0 00-1.32 2.214l-.8 2.685a.75.75 0 00.933.933l2.685-.8a5.25 5.25 0 002.214-1.32l8.4-8.4z" />
<path d="M5.25 5.25a3 3 0 00-3 3v10.5a3 3 0 003 3h10.5a3 3 0 003-3V13.5a.75.75 0 00-1.5 0v5.25a1.5 1.5 0 01-1.5 1.5H5.25a1.5 1.5 0 01-1.5-1.5V8.25a1.5 1.5 0 011.5-1.5h5.25a.75.75 0 000-1.5H5.25z" />
</svg>
<span class="hover:underline hover:decoration-yellow-600">Edit</span>
</a>

Then let's import this component and use it in our listing page, +src/pages/index.astro:

---
import Layout from '../layouts/Layout.astro';
import QuoteActionEdit from '../components/QuoteActionEdit.astro';
import { quotesApi, gql } from '../lib/quotes-api';

// ...
---

<Layout title="All quotes" page="listing">
<main>
{quotes.length > 0 ? quotes.map((quote) => (
<div class="border-b mb-6">
...
<div class="flex flex-col mb-6 text-gray-400">
<span class="flex items-center">
<QuoteActionEdit id={quote.id} />
</span>
<span class="mt-4 text-gray-400 italic">Added {new Date(Number(quote.createdAt)).toUTCString()}</span>
</div>
</div>
)) : (
<p>No movie quotes have been added.</p>
)}
</main>
</Layout>

Add delete quote functionality

Our Movie Quotes app can create, retrieve and update quotes. Now we're going +to implement the D in CRUD — delete!

First let's create a new component, src/components/QuoteActionDelete.astro:

---
export interface Props {
id: number;
}

const { id } = Astro.props;
---
<form method="POST" action={`/delete/${id}`} class="form-delete-quote m-0">
<button type="submit" class="flex items-center text-gray-400 hover:text-red-700 underline decoration-red-700 decoration-2 underline-offset-4">
<svg class="w-6 h-6 mr-1" xmlns="http://www.w3.org/2000/svg" viewBox="0 0 24 24" fill="currentColor" class="w-6 h-6">
<path fill-rule="evenodd" d="M12 2.25c-5.385 0-9.75 4.365-9.75 9.75s4.365 9.75 9.75 9.75 9.75-4.365 9.75-9.75S17.385 2.25 12 2.25zm-1.72 6.97a.75.75 0 10-1.06 1.06L10.94 12l-1.72 1.72a.75.75 0 101.06 1.06L12 13.06l1.72 1.72a.75.75 0 101.06-1.06L13.06 12l1.72-1.72a.75.75 0 10-1.06-1.06L12 10.94l-1.72-1.72z" clip-rule="evenodd" />
</svg>
<span>Delete</span>
</button>
</form>

And then we'll drop it into our listing page, src/pages/index.astro:

---
import Layout from '../layouts/Layout.astro';
import QuoteActionEdit from '../components/QuoteActionEdit.astro';
import QuoteActionDelete from '../components/QuoteActionDelete.astro';
import { quotesApi, gql } from '../lib/quotes-api';

// ...
---

<Layout title="All quotes" page="listing">
<main>
{quotes.length > 0 ? quotes.map((quote) => (
<div class="border-b mb-6">
...
<div class="flex flex-col mb-6 text-gray-400">
<span class="flex items-center">
<QuoteActionEdit id={quote.id} />
<QuoteActionDelete id={quote.id} />
</span>
<span class="mt-4 text-gray-400 italic">Added {new Date(Number(quote.createdAt)).toUTCString()}</span>
</div>
</div>
...

At the moment when a delete form is submitted from our listing page, we get +an Astro 404 page. Let's fix this by creating a new directory, src/pages/delete/:

mkdir src/pages/delete/

And inside of it, let's create a new page, [id].astro:

---
import Layout from '../../layouts/Layout.astro';

import { quotesApi, gql } from '../../lib/quotes-api';
import { isPostRequest } from '../../lib/request-utils';

if (isPostRequest(Astro.request)) {
const id = Number(Astro.params.id);

const { error } = await quotesApi.mutation(gql`
mutation($id: ID!) {
deleteQuotes(where: { id: { eq: $id }}) {
id
}
}
`, { id });

if (!error) {
return Astro.redirect('/');
}
}
---
<Layout title="Delete movie quote">
<main>
<h2>Delete quote</h2>
<p class="text-lg bg-red-200 p-4">There was an error deleting the quote. Please try again.</p>
</main>
</Layout>

Now if we click on a delete quote button on our listings page, it should call our +GraphQL API to delete the quote. To make this a little more user friendly, let's +add in a confirmation dialog so that users don't delete a quote by accident.

Let's create a new directory, src/scripts/:

mkdir src/scripts/

And inside of that directory let's create a new file, quote-actions.js:

// src/scripts/quote-actions.js

export function confirmDeleteQuote (form) {
if (confirm('Are you sure you want to delete this quote?')) {
form.submit()
}
}

Then we can pull it in as client side JavaScript on our listing page, +src/pages/index.astro:

<Layout>
...
</Layout>

<script>
import { confirmDeleteQuote } from '../scripts/quote-actions.js'

addEventListener('DOMContentLoaded', () => {
document.querySelectorAll('.form-delete-quote').forEach((deleteForm) => {
deleteForm.addEventListener('submit', (event) => {
event.preventDefault()
confirmDeleteQuote(event.currentTarget)
})
})
})
</script>

Build a "like" quote feature

We've built all the basic CRUD (Create, Retrieve, Update & Delete) features +into our application. Now let's build a feature so that users can interact +and "like" their favourite movie quotes.

To build this feature we're going to add custom functionality to our API +and then add a new component, along with some client side JavaScript, to +our frontend.

Create an API migration

We're now going to work on the code for API, under the apps/movie-quotes-api +directory.

First let's create a migration that adds a likes column to our quotes +database table. We'll create a new migration file, migrations/003.do.sql:

ALTER TABLE quotes ADD COLUMN likes INTEGER default 0;

This migration will automatically be applied when we next start our Platformatic +API.

Create an API plugin

To add custom functionality to our Platformatic API, we need to create a +Fastify plugin and +update our API configuration to use it.

Let's create a new file, plugin.js, and inside it we'll add the skeleton +structure for our plugin:

// plugin.js

'use strict'

module.exports = async function plugin (app) {
app.log.info('plugin loaded')
}

Now let's register our plugin in our API configuration file, platformatic.db.json:

{
...
"migrations": {
"dir": "./migrations"
},
"plugins": {
"paths": ["./plugin.js"]
}
}

And then we'll start up our Platformatic API:

npm run dev

We should see log messages that tell us that our new migration has been +applied and our plugin has been loaded:

[10:09:20.052] INFO (146270): running 003.do.sql
[10:09:20.129] INFO (146270): plugin loaded
[10:09:20.209] INFO (146270): server listening
url: "http://127.0.0.1:3042"

Now it's time to start adding some custom functionality inside our plugin.

Add a REST API route

We're going to add a REST route to our API that increments the count of +likes for a specific quote: /quotes/:id/like

First let's add fluent-json-schema as a dependency for our API:

npm install fluent-json-schema

We'll use fluent-json-schema to help us generate a JSON Schema. We can then +use this schema to validate the request path parameters for our route (id).

tip

You can use fastify-type-provider-typebox or typebox if you want to convert your JSON Schema into a TypeScript type. See this GitHub thread for a better overview, and look at the example below to see it in action.

Here you can see in practice how to leverage typebox combined with fastify-type-provider-typebox:

import { FastifyInstance } from "fastify";
import { Static, Type } from "@sinclair/typebox";
import { TypeBoxTypeProvider } from "@fastify/type-provider-typebox";

/**
* Creation of the JSON schema needed to validate the params passed to the route
*/
const schemaParams = Type.Object({
num1: Type.Number(),
num2: Type.Number(),
});

/**
* We convert the JSON schema to the TypeScript type, in this case:
* {
num1: number;
num2: number;
}
*/
type Params = Static<typeof schemaParams>;

/**
* Here we can pass the type previously created to our synchronous unit function
*/
const multiplication = ({ num1, num2 }: Params) => num1 * num2;

export default async function (app: FastifyInstance) {
app.withTypeProvider<TypeBoxTypeProvider>().get(
"/multiplication/:num1/:num2",
{ schema: { params: schemaParams } },
/**
* Since we leverage `withTypeProvider<TypeBoxTypeProvider>()`,
* we no longer need to explicitly define the `params`.
* They will be automatically inferred as:
* {
num1: number;
num2: number;
}
*/
({ params }) => multiplication(params)
);
}

Now let's add our REST API route in plugin.js:

'use strict'

const S = require('fluent-json-schema')

module.exports = async function plugin (app) {
app.log.info('plugin loaded')

// This JSON Schema will validate the request path parameters.
// It reuses part of the schema that Platformatic DB has
// automatically generated for our Quote entity.
const schema = {
params: S.object().prop('id', app.getSchema('Quote').properties.id)
}

app.post('/quotes/:id/like', { schema }, async function (request, response) {
return {}
})
}

We can now make a POST request to our new API route:

curl --request POST http://localhost:3042/quotes/1/like
info

Learn more about how validation works in the +Fastify validation documentation.

Our API route is currently returning an empty object ({}). Let's wire things +up so that it increments the number of likes for the quote with the specified ID. +To do this we'll add a new function inside of our plugin:

module.exports = async function plugin (app) {
app.log.info('plugin loaded')

async function incrementQuoteLikes (id) {
const { db, sql } = app.platformatic

const result = await db.query(sql`
UPDATE quotes SET likes = likes + 1 WHERE id=${id} RETURNING likes
`)

return result[0]?.likes
}

// ...
}

And then we'll call that function in our route handler function:

app.post('/quotes/:id/like', { schema }, async function (request, response) {
return { likes: await incrementQuoteLikes(request.params.id) }
})

Now when we make a POST request to our API route:

curl --request POST http://localhost:3042/quotes/1/like

We should see that the likes value for the quote is incremented every time +we make a request to the route.

{"likes":1}

Add a GraphQL API mutation

We can add a likeQuote mutation to our GraphQL API by reusing the +incrementQuoteLikes function that we just created.

Let's add this code at the end of our plugin, inside plugin.js:

module.exports = async function plugin (app) {
// ...

app.graphql.extendSchema(`
extend type Mutation {
likeQuote(id: ID!): Int
}
`)

app.graphql.defineResolvers({
Mutation: {
likeQuote: async (_, { id }) => await incrementQuoteLikes(id)
}
})
}

The code we've just added extends our API's GraphQL schema and defines +a corresponding resolver for the likeQuote mutation.

We can now load up GraphiQL in our web browser and try out our new likeQuote +mutation with this GraphQL query:

mutation {
likeQuote(id: 1)
}
info

Learn more about how to extend the GraphQL schema and define resolvers in the +Mercurius API documentation.

Enable CORS on the API

When we build "like" functionality into our frontend, we'll be making a client +side HTTP request to our GraphQL API. Our backend API and our frontend are running +on different origins, so we need to configure our API to allow requests from +the frontend. This is known as Cross-Origin Resource Sharing (CORS).

To enable CORS on our API, let's open up our API's .env file and add in +a new setting:

PLT_SERVER_CORS_ORIGIN=http://localhost:3000

The value of PLT_SERVER_CORS_ORIGIN is our frontend application's origin.

Now we can add a cors configuration object in our API's configuration file, +platformatic.db.json:

{
"server": {
"logger": {
"level": "{PLT_SERVER_LOGGER_LEVEL}"
},
"hostname": "{PLT_SERVER_HOSTNAME}",
"port": "{PORT}",
"cors": {
"origin": "{PLT_SERVER_CORS_ORIGIN}"
}
},
...
}

The HTTP responses from all endpoints on our API will now include the header:

access-control-allow-origin: http://localhost:3000

This will allow JavaScript running on web pages under the http://localhost:3000 +origin to make requests to our API.

Add like quote functionality

Now that our API supports "liking" a quote, let's integrate it as a feature in +our frontend.

First we'll create a new component, src/components/QuoteActionLike.astro:

---
export interface Props {
id: number;
likes: number;
}

const { id, likes } = Astro.props;
---
<span data-quote-id={id} class="like-quote cursor-pointer mr-5 flex items-center">
<svg class="like-icon w-6 h-6 mr-2 text-red-600" xmlns="http://www.w3.org/2000/svg" fill="none" viewBox="0 0 24 24" stroke-width="1.5" stroke="currentColor" class="w-6 h-6">
<path stroke-linecap="round" stroke-linejoin="round" d="M21 8.25c0-2.485-2.099-4.5-4.688-4.5-1.935 0-3.597 1.126-4.312 2.733-.715-1.607-2.377-2.733-4.313-2.733C5.1 3.75 3 5.765 3 8.25c0 7.22 9 12 9 12s9-4.78 9-12z" />
</svg>
<span class="likes-count w-8">{likes}</span>
</span>

<style>
.like-quote:hover .like-icon,
.like-quote.liked .like-icon {
fill: currentColor;
}
</style>

And in our listing page, src/pages/index.astro, let's import our new +component and add it into the interface:

---
import Layout from '../layouts/Layout.astro';
import QuoteActionEdit from '../components/QuoteActionEdit.astro';
import QuoteActionDelete from '../components/QuoteActionDelete.astro';
import QuoteActionLike from '../components/QuoteActionLike.astro';
import { quotesApi, gql } from '../lib/quotes-api';

// ...
---

<Layout title="All quotes" page="listing">
<main>
{quotes.length > 0 ? quotes.map((quote) => (
<div class="border-b mb-6">
...
<div class="flex flex-col mb-6 text-gray-400">
<span class="flex items-center">
<QuoteActionLike id={quote.id} likes={quote.likes} />
<QuoteActionEdit id={quote.id} />
<QuoteActionDelete id={quote.id} />
</span>
<span class="mt-4 text-gray-400 italic">Added {new Date(Number(quote.createdAt)).toUTCString()}</span>
</div>
</div>
...

Then let's update the GraphQL query in this component's script to retrieve the +likes field for all quotes:

const { data } = await quotesApi.query(gql`
query {
quotes {
id
quote
saidBy
likes
createdAt
movie {
id
name
}
}
}
`);

Now we have the likes showing for each quote, let's wire things up so that +clicking on the like component for a quote will call our API and add a like.

Let's open up src/scripts/quote-actions.js and add a new function that +makes a request to our GraphQL API:

import { quotesApi, gql } from '../lib/quotes-api.js'

export function confirmDeleteQuote (form) {
if (confirm('Are you sure you want to delete this quote?')) {
form.submit()
}
}

export async function likeQuote (likeQuote) {
likeQuote.classList.add('liked')
likeQuote.classList.remove('cursor-pointer')

const id = Number(likeQuote.dataset.quoteId)

const { data } = await quotesApi.mutation(gql`
mutation($id: ID!) {
likeQuote(id: $id)
}
`, { id })

if (data?.likeQuote) {
likeQuote.querySelector('.likes-count').innerText = data.likeQuote
}
}

And then let's attach the likeQuote function to the click event for each +like quote component on our listing page. We can do this by adding a little +extra code inside the <script> block in src/pages/index.astro:

<script>
import { confirmDeleteQuote, likeQuote } from '../scripts/quote-actions.js'

addEventListener('DOMContentLoaded', () => {
document.querySelectorAll('.form-delete-quote').forEach((deleteForm) => {
deleteForm.addEventListener('submit', (event) => {
event.preventDefault()
confirmDeleteQuote(event.currentTarget)
})
})

document.querySelectorAll('.like-quote').forEach((container) => {
container.addEventListener('click', (event) => likeQuote(event.currentTarget), { once: true })
})
})
</script>

Sort the listing by top quotes

Now that users can like their favourite quotes, as a final step, we'll allow +for sorting quotes on the listing page by the number of likes they have.

Let's update src/pages/index.astro to read a sort query string parameter and use it in the GraphQL query that we make to our API:

---
// ...

const allowedSortFields = ["createdAt", "likes"];
const searchParamSort = new URL(Astro.request.url).searchParams.get("sort");
const sort = allowedSortFields.includes(searchParamSort) ? searchParamSort : "createdAt";

const { data } = await quotesApi.query(gql`
query {
quotes(orderBy: {field: ${sort}, direction: DESC}) {
id
quote
saidBy
likes
createdAt
movie {
id
name
}
}
}
`);

const quotes = data?.quotes || [];
---
<Layout title="All quotes" page={`listing-${sort}`}>
...

Then let's replace the 'All quotes' link in the <nav> in src/layouts/Layout.astro +with two new links:

<nav class="prose mx-auto mb-6 border-y border-gray-200 flex">
<a href="/?sort=createdAt" class={`p-3 ${page === "listing-createdAt" && navActiveClasses}`}>Latest quotes</a>
<a href="/?sort=likes" class={`p-3 ${page === "listing-likes" && navActiveClasses}`}>Top quotes</a>
<a href="/add" class={`p-3 ${page === "add" && navActiveClasses}`}>Add a quote</a>
</nav>

With these few extra lines of code, our users can now sort quotes by when they +were created or by the number of likes that they have. Neat!

Wrapping up

And we're done — you now have the knowledge you need to build a full stack +application on top of Platformatic DB.

We can't wait to see what you'll build next!

+ + + + \ No newline at end of file diff --git a/docs/0.41.2/getting-started/new-api-project-instructions/index.html b/docs/0.41.2/getting-started/new-api-project-instructions/index.html new file mode 100644 index 00000000000..aafbb5d0d7d --- /dev/null +++ b/docs/0.41.2/getting-started/new-api-project-instructions/index.html @@ -0,0 +1,20 @@ + + + + + +new-api-project-instructions | Platformatic Open Source Software + + + + + +
+
Version: 0.41.2

new-api-project-instructions

Run this command in your terminal to start the Platformatic creator wizard:

npm create platformatic@latest

This interactive command-line tool will ask you some questions about how you'd +like to set up your new Platformatic project. For this guide, select these options:

- Which kind of project do you want to create?  => DB
- Where would you like to create your project? => quick-start
- Do you want to create default migrations? => Yes
- Do you want to create a plugin? => Yes
- Do you want to use TypeScript? => No
- Do you want to install dependencies? => Yes (this can take a while)
- Do you want to apply the migrations? => Yes
- Do you want to generate types? => Yes
- Do you want to create the github action to deploy this application to Platformatic Cloud dynamic workspace? => No
- Do you want to create the github action to deploy this application to Platformatic Cloud static workspace? => No

Once the wizard is complete, you'll have a Platformatic app project in the +folder quick-start, with example migration files and a plugin script.

info

Make sure you run the npm/yarn/pnpm install command manually if you don't ask the wizard to do it for you.

+ + + + \ No newline at end of file diff --git a/docs/0.41.2/getting-started/quick-start-guide/index.html b/docs/0.41.2/getting-started/quick-start-guide/index.html new file mode 100644 index 00000000000..3864b826c40 --- /dev/null +++ b/docs/0.41.2/getting-started/quick-start-guide/index.html @@ -0,0 +1,38 @@ + + + + + +Quick Start Guide | Platformatic Open Source Software + + + + + +
+
Version: 0.41.2

Quick Start Guide

In this guide you'll learn how to create and run your first API with +Platformatic DB. Let's get started!

info

This guide uses SQLite for the database, but +Platformatic DB also supports PostgreSQL, +MySQL and MariaDB databases.

Prerequisites

Platformatic supports macOS, Linux and Windows (WSL recommended).

To follow along with this guide you'll need to have these things installed:

Create a new API project

Automatic CLI

Run this command in your terminal to start the Platformatic creator wizard:

npm create platformatic@latest

This interactive command-line tool will ask you some questions about how you'd +like to set up your new Platformatic project. For this guide, select these options:

- Which kind of project do you want to create?  => DB
- Where would you like to create your project? => quick-start
- Do you want to create default migrations? => Yes
- Do you want to create a plugin? => Yes
- Do you want to use TypeScript? => No
- Do you want to install dependencies? => Yes (this can take a while)
- Do you want to apply the migrations? => Yes
- Do you want to generate types? => Yes
- Do you want to create the github action to deploy this application to Platformatic Cloud dynamic workspace? => No
- Do you want to create the github action to deploy this application to Platformatic Cloud static workspace? => No

Once the wizard is complete, you'll have a Platformatic app project in the +folder quick-start, with example migration files and a plugin script.

info

Make sure you run the npm/yarn/pnpm install command manually if you don't ask the wizard to do it for you.

Start your API server

In your project directory, run this command to start your API server:

npm start

Your Platformatic API is now up and running! 🌟

This command will:

  • Automatically map your SQL database to REST and GraphQL API interfaces.
  • Start the Platformatic API server.

You can jump down to Next steps or read on to learn more about +the project files that the wizard has created for you.

Check the database schema

In your project directory (quick-start), open the migrations directory, which stores your database migration files. It contains both the 001.do.sql and 001.undo.sql files. The 001.do.sql file contains the SQL statements to create the database objects, while the 001.undo.sql file contains the SQL statements to drop them.

migrations/001.do.sql
CREATE TABLE IF NOT EXISTS movies (
id INTEGER PRIMARY KEY,
title TEXT NOT NULL
);

Note that this migration has already been applied by the Platformatic creator.

Check your API configuration

In your project directory, check the Platformatic configuration file named +platformatic.db.json and the environment file named .env:

The created configuration tells Platformatic to:

  • Run an API server on http://127.0.0.1:3042/
  • Connect to an SQLite database stored in a file named db.sqlite
  • Look for database migration files in the migrations directory
  • Load the plugin file named plugin.js and automatically generate types
tip

The Configuration reference explains all of the +supported configuration options.

Manual setup

Create a directory for your new API project:

mkdir quick-start

cd quick-start

Then create a package.json file and install the platformatic +CLI as a project dependency:

npm init --yes

npm install platformatic

Add a database schema

In your project directory (quick-start), create a file for your SQLite database and a migrations directory to store your database migration files:

touch db.sqlite

mkdir migrations

Then create a new migration file named 001.do.sql in the migrations +directory.

Copy and paste this SQL query into the migration file:

migrations/001.do.sql
CREATE TABLE movies (
id INTEGER PRIMARY KEY,
title VARCHAR(255) NOT NULL
);

When it's run by Platformatic, this query will create a new database table +named movies.

tip

You can check syntax for SQL queries on the Database.Guide SQL Reference.

Configure your API

In your project directory, create a new Platformatic configuration file named +platformatic.db.json.

Copy and paste in this configuration:

platformatic.db.json
{
"server": {
"hostname": "127.0.0.1",
"port": "3042"
},
"db": {
"connectionString": "sqlite://./db.sqlite"
},
"migrations": {
"dir": "./migrations",
"autoApply": "true"
}
}

This configuration tells Platformatic to:

  • Run an API server on http://127.0.0.1:3042/
  • Connect to an SQLite database stored in a file named db.sqlite
  • Look for, and apply the database migrations specified in the migrations directory
tip

The Configuration reference explains all of the +supported configuration options.

Start your API server

In your project directory, use the Platformatic CLI to start your API server:

npx platformatic db start

This will:

  • Automatically map your SQL database to REST and GraphQL API interfaces.
  • Start the Platformatic API server.

Your Platformatic API is now up and running! 🌟

Next steps

Use the REST API interface

You can use cURL to make requests to the REST interface of your API, for example:

Create a new movie

curl -X POST -H "Content-Type: application/json" \
-d "{ \"title\": \"Hello Platformatic DB\" }" \
http://localhost:3042/movies

You should receive a response from your API like this:

{"id":1,"title":"Hello Platformatic DB"}

Get all movies

curl http://localhost:3042/movies

You should receive a response from your API like this, with an array +containing all the movies in your database:

[{"id":1,"title":"Hello Platformatic DB"}]
tip

If you would like to know more about what routes are automatically available, +take a look at the REST API reference +for an overview of the REST interface that the generated API provides.

Swagger OpenAPI documentation

You can explore the OpenAPI documentation for your REST API in the Swagger UI at +http://localhost:3042/documentation

Use the GraphQL API interface

Open http://localhost:3042/graphiql in your +web browser to explore the GraphQL interface of your API.

Try out this GraphQL query to retrieve all movies from your API:

query {
movies {
id
title
}
}
tip

Learn more about your API's GraphQL interface in the +GraphQL API reference.

+ + + + \ No newline at end of file diff --git a/docs/0.41.2/guides/add-custom-functionality/extend-graphql/index.html b/docs/0.41.2/guides/add-custom-functionality/extend-graphql/index.html new file mode 100644 index 00000000000..113365c456f --- /dev/null +++ b/docs/0.41.2/guides/add-custom-functionality/extend-graphql/index.html @@ -0,0 +1,18 @@ + + + + + +Extend GraphQL Schema | Platformatic Open Source Software + + + + + +
+
Version: 0.41.2

Extend GraphQL Schema

Sum Function

Copy and paste this code into ./sample-plugin.js file

'use strict'
module.exports = async(app, opts) => {
app.graphql.extendSchema(`
extend type Query {
add(x: Int, y: Int): Int
}
`)
app.graphql.defineResolvers({
Query: {
add: async (_, { x, y }) => x + y
}
})
}

This will add a new GraphQL query called add which will simply add the two inputs x and y provided.

You don't need to reload the server, since it will watch this file and hot-reload itself. +Let's query the server with the following body


query{
add(x: 1, y: 2)
}

You can use curl command to run this query

$ curl --location --request POST 'http://localhost:3042/graphql' \
--header 'Content-Type: application/json' \
--data-raw '{"query":"query{\n add(x: 1, y: 2)\n}"}'

You will get this output, with the sum.

{
"data": {
"add": 3
}
}

Extend Entities API

Let's implement a getPageByTitle query

'use strict'
module.exports = async(app, opts) => {
app.graphql.extendSchema(`
extend type Query {
getPageByTitle(title: String): Page
}
`)
app.graphql.defineResolvers({
Query: {
getPageByTitle: async(_, { title }) => {
const res = await app.platformatic.entities.page.find({
where: {
title: {
eq: title
}
}
})
if (res) {
return res[0]
}
return null
}
}
})
}

Page GraphQL type is already defined by Platformatic DB on start.

We are going to run this code against this GraphQL query

query{
getPageByTitle(title: "First Page"){
id
title
}
}

You can use curl command to run this query

$ curl --location --request POST 'http://localhost:3042/graphql' \
--header 'Content-Type: application/json' \
--data-raw '{"query":"query{\n getPageByTitle(title: \"First Page\"){\n id\n title\n }\n}"}'

You will get an output similar to this

{
"data": {
"getPageByTitle": {
"id": "1",
"title": "First Page"
}
}
}
+ + + + \ No newline at end of file diff --git a/docs/0.41.2/guides/add-custom-functionality/extend-rest/index.html b/docs/0.41.2/guides/add-custom-functionality/extend-rest/index.html new file mode 100644 index 00000000000..be308b9c85a --- /dev/null +++ b/docs/0.41.2/guides/add-custom-functionality/extend-rest/index.html @@ -0,0 +1,17 @@ + + + + + +Extend REST API | Platformatic Open Source Software + + + + + +
+
Version: 0.41.2

Extend REST API

We will follow same examples implemented in GraphQL examples: a sum function and an API to get pages by title.

Sum Function

Copy and paste this code into ./sample-plugin.js file

'use strict'
module.exports = async(app, opts) => {
app.post('/sum', async(req, reply) => {
const { x, y } = req.body
return { sum: (x + y)}
})
}

You don't need to reload the server, since it will watch this file and hot-reload itself.

Let's make a POST /sum request to the server with the following body

{
"x": 1,
"y": 2
}

You can use curl command to run this query

$ curl --location --request POST 'http://localhost:3042/sum' \
--header 'Content-Type: application/json' \
--data-raw '{
"x": 1,
"y": 2
}'

You will get this output, with the sum.

{
"sum": 3
}

Extend Entities API

Let's implement a /page-by-title endpoint, using Entities API

'use strict'
module.exports = async(app, opts) => {
app.get('/page-by-title', async(req, reply) => {
const { title } = req.query
const res = await app.platformatic.entities.page.find({
where: {
title: {
eq: title
}
}
})
if (res) {
return res[0]
}
return null
})
}

We will make a GET /page-by-title?title=First%20Page request, and we expect a single page as output.

You can use curl command to run this query

$ curl --location --request GET 'http://localhost:3042/page-by-title?title=First Page'

You will get an output similar to this

{
"id": "1",
"title": "First Page",
"body": "This is the first sample page"
}
+ + + + \ No newline at end of file diff --git a/docs/0.41.2/guides/add-custom-functionality/introduction/index.html b/docs/0.41.2/guides/add-custom-functionality/introduction/index.html new file mode 100644 index 00000000000..99c9c027382 --- /dev/null +++ b/docs/0.41.2/guides/add-custom-functionality/introduction/index.html @@ -0,0 +1,17 @@ + + + + + +Add Custom Functionality | Platformatic Open Source Software + + + + + +
+
Version: 0.41.2

Add Custom Functionality

If you want to extend Platformatic DB features, it is possible to register a plugin, which will be in the form of a standard Fastify plugin.

The config file will specify where the plugin file is located as the example below:

{
...
"plugins": {
"paths": ["./plugin/index.js"]
}
}

The path is relative to the config file path.

Since it uses fastify-isolate under the hood, all other options of that package may be specified under the plugin property.

Once the config file is set up, you can write your plugin

module.exports = async function (app) {
app.log.info('plugin loaded')
// Extend GraphQL Schema with resolvers
app.graphql.extendSchema(`
extend type Query {
add(x: Int, y: Int): Int
}
`)
app.graphql.defineResolvers({
Query: {
add: async (_, { x, y }) => x + y
}
})

// Create a new route, see https://www.fastify.io/docs/latest/Reference/Routes/ for more info
app.post('/sum', (req, reply) => {
const {x, y} = req.body
return { result: x + y }
})

// access platformatic entities data
app.get('/all-entities', (req, reply) => {
const entities = Object.keys(app.platformatic.entities)
return { entities }
})
}

+ + + + \ No newline at end of file diff --git a/docs/0.41.2/guides/add-custom-functionality/prerequisites/index.html b/docs/0.41.2/guides/add-custom-functionality/prerequisites/index.html new file mode 100644 index 00000000000..aa7f5e4e40a --- /dev/null +++ b/docs/0.41.2/guides/add-custom-functionality/prerequisites/index.html @@ -0,0 +1,17 @@ + + + + + +Prerequisites | Platformatic Open Source Software + + + + + +
+
Version: 0.41.2

Prerequisites

In the following examples we assume you already

  • cloned platformatic/platformatic repo from Github
  • ran pnpm install to install all dependencies
  • have Docker and docker-compose installed and running on your machine

Config File

Create a platformatic.db.json file in the root project, it will be loaded automatically by Platformatic (no need of -c, --config flag).

{
"server": {
"hostname": "127.0.0.1",
"port": 3042,
"logger": {
"level": "info"
}
},
"db": {
"connectionString": "postgres://postgres:postgres@127.0.0.1/postgres"
},
"migrations": {
"dir": "./migrations",
"table": "versions"
},
"plugins": {
"paths": ["plugin.js"]
}
}
  • Once Platformatic DB starts, its API will be available at http://127.0.0.1:3042
  • It will connect and read the schema from a PostgreSQL DB
  • Will read migrations from ./migrations directory
  • Will load custom functionality from the ./plugin.js file.

Database and Migrations

Start the database using the sample docker-compose.yml file.

$ docker-compose up -d postgresql

For migrations create a ./migrations directory and a 001.do.sql file with following contents

CREATE TABLE pages (
id SERIAL PRIMARY KEY,
title VARCHAR(255) NOT NULL,
body TEXT NOT NULL
);
INSERT INTO pages (title, body) VALUES ('First Page', 'This is the first sample page');
INSERT INTO pages (title, body) VALUES ('Second Page', 'This is the second sample page');
INSERT INTO pages (title, body) VALUES ('Third Page', 'This is the third sample page');

Plugin

Copy and paste this boilerplate code into ./plugin.js file. We will fill this in the examples.

'use strict'

module.exports = async (app, opts) => {
// we will fill this later
}

Start the server

Run

$ platformatic db start

You will get an output similar to this

                           /////////////
///// /////
/// ///
/// ///
/// ///
&& /// /// &&
&&&&&& /// /// &&&&&&
&&&& /// /// &&&&
&&& /// /// &&&&&&&&&&&&
&&& /// /////// //// && &&&&&
&& /// /////////////// &&&
&&& /// /// &&&
&&& /// // &&
&&& /// &&
&&& /// &&&
&&&& /// &&&
&&&&&% /// &&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&
///
///
///
///
///
///

[11:19:46.562] INFO (65122): running 001.do.sql
[11:19:46.929] INFO (65122): server listening
url: "http://127.0.0.1:3042"

Now it is possible to create some examples, such as extending the GraphQL schema or extending the REST API

+ + + + \ No newline at end of file diff --git a/docs/0.41.2/guides/add-custom-functionality/raw-sql/index.html b/docs/0.41.2/guides/add-custom-functionality/raw-sql/index.html new file mode 100644 index 00000000000..690f924edb4 --- /dev/null +++ b/docs/0.41.2/guides/add-custom-functionality/raw-sql/index.html @@ -0,0 +1,17 @@ + + + + + +Raw SQL queries | Platformatic Open Source Software + + + + + +
+
Version: 0.41.2

Raw SQL queries

To run raw SQL queries using plugins, use the app.platformatic.db.query method and pass it a SQL query built with the app.platformatic.sql method.

'use strict'
module.exports = async(app, opts) => {
app.graphql.extendSchema(`
type YearlySales {
year: Int
sales: Int
}

extend type Query {
yearlySales: [YearlySales]
}
`)
app.graphql.defineResolvers({
Query: {
yearlySales: async(_, { title }) => {
const { db, sql } = app.platformatic;
const res = await db.query(sql(`
SELECT
YEAR(created_at) AS year,
SUM(amount) AS sales
FROM
orders
GROUP BY
YEAR(created_at)
`))
return res
}
}
})
}
+ + + + \ No newline at end of file diff --git a/docs/0.41.2/guides/compiling-typescript-for-deployment/index.html b/docs/0.41.2/guides/compiling-typescript-for-deployment/index.html new file mode 100644 index 00000000000..6bd686c0380 --- /dev/null +++ b/docs/0.41.2/guides/compiling-typescript-for-deployment/index.html @@ -0,0 +1,25 @@ + + + + + +Compiling Typescript for Deployment | Platformatic Open Source Software + + + + + +
+
Version: 0.41.2

Compiling Typescript for Deployment

Platformatic Service provides automatic TypeScript compilation during the startup +of your Node.js server. While this provides an amazing developer experience, in production it adds additional +start time and it requires more resources. In this guide, we show how to compile your TypeScript +source files before shipping to a server.

Setup

The following is supported by all Platformatic applications, as they are all based on the same plugin system. +If you have generated your application using npx create-platformatic@latest, you will have a similar section in your config file:

{
...
"plugins": {
"paths": [{
"path": "plugins",
"encapsulate": false
}, "routes"],
"typescript": "{PLT_TYPESCRIPT}"
}
}

Note that the {PLT_TYPESCRIPT} will be automatically replaced with the PLT_TYPESCRIPT environment variable, that is configured in your +.env (and .env.sample) file:

PLT_TYPESCRIPT=true

Older Platformatic applications might not have the same layout, if so you can update your settings to match (after updating your dependencies).

Compiling for deployment

Compiling for deployment is then as easy as running plt service compile in that same folder. Remember to set PLT_TYPESCRIPT=false in your environment variables in the deployed environments.

Usage with Runtime

If you are building a Runtime-based application, you will need +to compile every service independently or use the plt runtime compile command.

Avoid shipping TypeScript sources

If you want to avoid shipping the TypeScript sources you need to configure Platformatic with the location +where your files have been built by adding an outDir option:

{
...
"plugins": {
"paths": [{
"path": "plugins",
"encapsulate": false
}, "routes"],
"typescript": {
"enabled": "{PLT_TYPESCRIPT}",
"outDir": "dist"
}
}
}

This is not necessary if you include tsconfig.json together with the compiled code.

+ + + + \ No newline at end of file diff --git a/docs/0.41.2/guides/debug-platformatic-db/index.html b/docs/0.41.2/guides/debug-platformatic-db/index.html new file mode 100644 index 00000000000..d6cb00c18ed --- /dev/null +++ b/docs/0.41.2/guides/debug-platformatic-db/index.html @@ -0,0 +1,17 @@ + + + + + +Debug Platformatic DB | Platformatic Open Source Software + + + + + +
+
Version: 0.41.2

Debug Platformatic DB

Error: No tables found in the database

  • Verify your database connection string is correct in your Platformatic DB configuration
    • Make sure the database name is correct
  • Ensure that you have run the migration command npx platformatic db migrations apply before starting the server. See the Platformatic DB Migrations documentation for more information on working with migrations.

Logging SQL queries

You can see all the queries that are being run against your database in your terminal by setting the logger level to trace in your platformatic.db.json config file:

platformatic.db.json
{
"server": {
"logger": {
"level": "trace"
}
}
}
+ + + + \ No newline at end of file diff --git a/docs/0.41.2/guides/deploying-on-lambda/index.html b/docs/0.41.2/guides/deploying-on-lambda/index.html new file mode 100644 index 00000000000..fd031c1d76f --- /dev/null +++ b/docs/0.41.2/guides/deploying-on-lambda/index.html @@ -0,0 +1,26 @@ + + + + + +Deploying on AWS Lambda | Platformatic Open Source Software + + + + + +
+
Version: 0.41.2

Deploying on AWS Lambda

It is possible to deploy Platformatic applications to AWS Lambda +by leveraging @fastify/aws-lambda.

Once you set up your Platformatic DB application, such as following +our tutorial, you can create a +server.mjs file as follows:

import awsLambdaFastify from '@fastify/aws-lambda'
import { buildServer } from '@platformatic/db'

const app = await buildServer('./platformatic.db.json')
// You can use the same approach with both Platformatic DB and
// and service
// const app = await buildServer('./platformatic.service.json')

// The following also work for Platformatic Service applications
// import { buildServer } from '@platformatic/service'
export const handler = awsLambdaFastify(app)

// Loads the Application, must be after the call to `awsLambdaFastify`
await app.ready()

This would be the entry point for your AWS Lambda function.

Avoiding cold start

Caching the DB schema

If you use Platformatic DB, you want to turn on the schemalock +configuration to cache the schema +information on disk.

Set the db.schemalock configuration to true, start the application, +and a schema.lock file should appear. Make sure to commit that file and +deploy your lambda.

Provisioned concurrency

Since AWS Lambda now enables the use of ECMAScript (ES) modules in Node.js 14 runtimes, +you could lower the cold start latency when used with Provisioned Concurrency +thanks to the top-level await functionality. (Excerpt taken from @fastify/aws-lambda)

+ + + + \ No newline at end of file diff --git a/docs/0.41.2/guides/deployment/advanced-fly-io-deployment/index.html b/docs/0.41.2/guides/deployment/advanced-fly-io-deployment/index.html new file mode 100644 index 00000000000..3f150834864 --- /dev/null +++ b/docs/0.41.2/guides/deployment/advanced-fly-io-deployment/index.html @@ -0,0 +1,22 @@ + + + + + +Advanced Fly.io Deployment | Platformatic Open Source Software + + + + + +
+
Version: 0.41.2

Advanced Fly.io Deployment

Techniques used in this guide are based on the Deploy to Fly.io with SQLite +deployment guide.

Adding sqlite for debugging

With a combination of Docker and Fly.io, you can create an easy way to debug your SQLite application without stopping your application or exporting the data. At the end of this guide, you will be able to run fly ssh console -C db-cli to be dropped into your remote database.

Start by creating a script for launching the database, calling it db-cli.sh:

#!/bin/sh
set -x
# DSN will be defined in the Dockerfile
sqlite3 $DSN

Create a new Dockerfile which will act as the build and deployment image:

FROM node:18-alpine

# Setup sqlite viewer
RUN apk add sqlite
ENV DSN "/app/.platformatic/data/app.db"
COPY db-cli.sh /usr/local/bin/db-cli
RUN chmod +x /usr/local/bin/db-cli

WORKDIR /app
COPY package.json package.json
COPY package-lock.json package-lock.json

RUN npm ci --omit=dev

COPY platformatic.db.json platformatic.db.json

COPY migrations migrations
# Uncomment if your application is running a plugin
# COPY plugin.js plugin.js

EXPOSE 8080

CMD ["npm", "start"]

Add a start script to your package.json:

{
"scripts": {
"start": "platformatic db"
}
}

With Fly, it becomes straightforward to connect directly to the database by +running the following command from your local machine:

fly ssh console -C db-cli
+ + + + \ No newline at end of file diff --git a/docs/0.41.2/guides/deployment/deploy-to-fly-io-with-sqlite/index.html b/docs/0.41.2/guides/deployment/deploy-to-fly-io-with-sqlite/index.html new file mode 100644 index 00000000000..aa5f79f182b --- /dev/null +++ b/docs/0.41.2/guides/deployment/deploy-to-fly-io-with-sqlite/index.html @@ -0,0 +1,33 @@ + + + + + +Deploy to Fly.io with SQLite | Platformatic Open Source Software + + + + + +
+
Version: 0.41.2

Deploy to Fly.io with SQLite

note

To follow this how-to guide, you'll first need to install the Fly CLI and create +an account by following this official guide. +You will also need an existing Platformatic DB project, please check out our +getting started guide if needed.

Navigate to your Platformatic DB project in the terminal on your local machine. +Run fly launch and follow the prompts. When it asks if you want to deploy +now, say "no" as there are a few things that you'll need to configure first.

You can also create the fly application with one line. This will create your +application in London (lhr):

fly launch --no-deploy --generate-name --region lhr --org personal --path .

The fly CLI should have created a fly.toml file in your project +directory.

Explicit builder

The fly.toml file may be missing an explicit builder setting. To have +consistent builds, it is best to add a build section:

[build]
builder = "heroku/buildpacks:20"

Database storage

Create a volume for database storage, naming it data:

fly volumes create data

This will create storage in the same region as the application. The volume +defaults to 3GB size, use -s to change the size. For example, -s 10 is 10GB.

Add a mounts section in fly.toml:

[mounts]
source = "data"
destination = "/app/.platformatic/data"

Create a directory in your project where your SQLite database will be created:

mkdir -p .platformatic/data

touch .platformatic/data/.gitkeep

The .gitkeep file ensures that this directory will always be created when +your application is deployed.

You should also ensure that your SQLite database is ignored by Git. This helps +avoid inconsistencies when your application is deployed:

echo "*.db" >> .gitignore

The command above assumes that your SQLite database file ends with the extension +.db — if the extension is different then you must change the command to match.

Change the connection string to an environment variable and make sure that +migrations are autoApplying (for platformatic@^0.4.0) in platformatic.db.json:

{
"db": {
"connectionString": "{DATABASE_URL}"
},
"migrations": {
"dir": "./migrations",
"autoApply": true
}
}

Configure server

Make sure that your platformatic.db.json uses environment variables +for the server section:

{
"server": {
"logger": {
"level": "{PLT_SERVER_LOGGER_LEVEL}"
},
"hostname": "{PLT_SERVER_HOSTNAME}",
"port": "{PORT}"
}
}

Configure environment

Start with your local environment, create a .env file and put the following:

PORT=3042
PLT_SERVER_HOSTNAME=127.0.0.1
PLT_SERVER_LOGGER_LEVEL=debug
DATABASE_URL=sqlite://.platformatic/data/movie-quotes.db

Avoid accidental leaks by ignoring your .env file:

echo ".env" >> .gitignore

This same configuration needs to be added to fly.toml:

[env]
PORT = 8080
PLT_SERVER_HOSTNAME = "0.0.0.0"
PLT_SERVER_LOGGER_LEVEL = "info"
DATABASE_URL = "sqlite:///app/.platformatic/data/movie-quotes.db"

Deploy application

A valid package.json will be needed so if you do not have one, generate one +by running npm init.

In your package.json, make sure there is a start script to run your +application:

{
"scripts": {
"start": "platformatic db"
}
}

Before deploying, make sure a .dockerignore file is created:

cp .gitignore .dockerignore

Finally, deploy the application to Fly by running:

fly deploy
+ + + + \ No newline at end of file diff --git a/docs/0.41.2/guides/deployment/index.html b/docs/0.41.2/guides/deployment/index.html new file mode 100644 index 00000000000..d202a7c1039 --- /dev/null +++ b/docs/0.41.2/guides/deployment/index.html @@ -0,0 +1,46 @@ + + + + + +Deployment | Platformatic Open Source Software + + + + + +
+
Version: 0.41.2

Deployment

Applications built with Platformatic DB can be deployed to a hosting service +in the same way as any other Node.js application. This guide covers a few +things that will help smooth the path from development to production.

Running a Platformatic DB application

Make the Platformatic CLI available

To run a Platformatic DB application, the Platformatic CLI must be available +in the production environment. The most straightforward way of achieving this +is to install it as a project dependency. +This means that when npm install (or npm ci) is run as part of your +build/deployment process, the Platformatic CLI will be installed.

Define an npm run script

A number of hosting services will automatically detect if your project's +package.json has a start npm run script. They will then execute the command +npm start to run your application in production.

You can add platformatic db start as the command for your project's start +npm run script, for example:

{
...
"scripts": {
"start": "platformatic db start",
},
}

Server configuration

info

See the Configuration reference for all +configuration settings.

Configuration with environment variables

We recommend that you use environment variable placeholders +in your Platformatic DB configuration. This will allow you to configure +different settings in your development and production environments.

In development you can set the environment variables via a .env file +that will be automatically loaded by Platformatic DB. For example:

PORT=3042
PLT_SERVER_HOSTNAME=127.0.0.1

In production your hosting provider will typically provide their own mechanism +for setting environment variables.

Configure the server port

Configure the port that the server will listen on by setting an environment +variable placeholder in your Platformatic DB configuration file:

platformatic.db.json
{
"server": {
...
"port": "{PORT}"
},
...
}

Listen on all network interfaces

Most hosting providers require that you configure your server to bind to all +available network interfaces. To do this you must set the server hostname to +0.0.0.0.

This can be handled with an environment variable placeholder in your Platformatic +DB configuration file:

platformatic.db.json
{
"server": {
...
"hostname": "{PLT_SERVER_HOSTNAME}",
},
...
}

The environment variable PLT_SERVER_HOSTNAME should then be set to 0.0.0.0 +in your hosting environment.

Security considerations

We recommend disabling the GraphiQL web UI in production. It can be disabled +with the following configuration:

platformatic.db.json
{
"db": {
...
"graphql": {
"graphiql": false
}
},
...
}

If you want to use this feature in development, replace the configuration +values with environment variable placeholders +so you can set it to true in development and false in production.

Removing the welcome page

If you want to remove the welcome page, you should register an index route.

module.exports = async function (app) {
// removing the welcome page
app.get('/', (req, reply) => {
return { hello: 'world' }
})
}

Databases

Applying migrations

If you're running a single instance of your application in production, it's best to allow Platformatic DB to automatically run migrations when the server starts up. This reduces the chance of a currently running instance using a database structure it doesn't understand while the new version is still being deployed.

SQLite

When using an SQLite database, you can ensure you don’t commit it to your Git +repository by adding the SQLite database filename to your .gitignore file. +The SQLite database file will be automatically generated by Platformatic DB +when your application migrations are run in production.

+ + + + \ No newline at end of file diff --git a/docs/0.41.2/guides/dockerize-platformatic-app/index.html b/docs/0.41.2/guides/dockerize-platformatic-app/index.html new file mode 100644 index 00000000000..7452a030b65 --- /dev/null +++ b/docs/0.41.2/guides/dockerize-platformatic-app/index.html @@ -0,0 +1,20 @@ + + + + + +Dockerize a Platformatic App | Platformatic Open Source Software + + + + + +
+
Version: 0.41.2

Dockerize a Platformatic App

This guide explains how to create a new Platformatic DB app, which connects to a PostgreSQL database.

We will then create a docker-compose.yml file that will run both services in separate containers

Generate a Platformatic DB App

Run this command in your terminal to start the Platformatic creator wizard:

npm create platformatic@latest

This interactive command-line tool will ask you some questions about how you'd +like to set up your new Platformatic project. For this guide, select these options:

- Which kind of project do you want to create?  => DB
- Where would you like to create your project? => quick-start
- Do you want to create default migrations? => Yes
- Do you want to create a plugin? => Yes
- Do you want to use TypeScript? => No
- Do you want to install dependencies? => Yes (this can take a while)
- Do you want to apply the migrations? => Yes
- Do you want to generate types? => Yes
- Do you want to create the github action to deploy this application to Platformatic Cloud dynamic workspace? => No
- Do you want to create the github action to deploy this application to Platformatic Cloud static workspace? => No

Once the wizard is complete, you'll have a Platformatic app project in the +folder quick-start, with example migration files and a plugin script.

info

Make sure you run the npm/yarn/pnpm install command manually if you don't ask the wizard to do it for you.

Create Docker image for the Platformatic DB App

In this step you are going to create some files into the root project directory

  • .dockerignore - This file tells Docker to ignore some files when copying the directory into the image filesystem
node_modules
.env*
  • start.sh - This is our entrypoint. We will run migrations then start platformatic
#!/bin/sh

echo "Running migrations..." && \
npx platformatic db migrations apply && \
echo "Starting Platformatic App..." && \
npm start
info

Make sure you make this file executable with the command chmod +x start.sh

  • Dockerfile - This is the file Docker uses to create the image
FROM node:18-alpine
WORKDIR /usr/src/app
COPY package*.json ./
RUN npm install
COPY . .
EXPOSE 3042
CMD [ "./start.sh" ]

At this point you can build your Docker image with the command

$ docker build -t platformatic-app .

Create Docker Compose config file

docker-compose.yml is the configuration file for docker-compose which will spin up containers for both PostgreSQL and our Platformatic App

version: "3.3"
services:
postgresql:
ports:
- "5433:5432"
image: "postgres:15-alpine"
environment:
- POSTGRES_PASSWORD=postgres
platformatic:
ports:
- "3042:3042"
image: 'platformatic-app:latest'
depends_on:
- postgresql
links:
- postgresql
environment:
PLT_SERVER_HOSTNAME: ${PLT_SERVER_HOSTNAME}
PORT: ${PORT}
PLT_SERVER_LOGGER_LEVEL: ${PLT_SERVER_LOGGER_LEVEL}
DATABASE_URL: postgres://postgres:postgres@postgresql:5432/postgres

A couple of things to notice:

  • The Platformatic app is started only once the database container is up and running (depends_on).
  • The Platformatic app is linked with the postgresql service. This means that inside its container, ping postgresql will be resolved with the internal IP address of the database container.
  • The environment is taken directly from the .env file created by the wizard

You can now run your containers with

$ docker-compose up # (-d if you want to send them in the background)

Everything should start smoothly, and you can access your app pointing your browser to http://0.0.0.0:3042

To stop the app you can either press CTRL-C if you are running them in the foreground, or, if you used the -d flag, run

$ docker-compose down
+ + + + \ No newline at end of file diff --git a/docs/0.41.2/guides/generate-frontend-code-to-consume-platformatic-rest-api/index.html b/docs/0.41.2/guides/generate-frontend-code-to-consume-platformatic-rest-api/index.html new file mode 100644 index 00000000000..c30d54df731 --- /dev/null +++ b/docs/0.41.2/guides/generate-frontend-code-to-consume-platformatic-rest-api/index.html @@ -0,0 +1,32 @@ + + + + + +Generate Front-end Code to Consume Platformatic REST API | Platformatic Open Source Software + + + + + +
+
Version: 0.41.2

Generate Front-end Code to Consume Platformatic REST API

By default, a Platformatic app exposes a REST API that provides CRUD (Create, Read, Update, Delete) functionality for each entity (see the Introduction to the REST API documentation for more information on the REST API).

The Platformatic CLI allows you to auto-generate the front-end code to import into your front-end application to consume the Platformatic REST API.

This guide

  • Explains how to create a new Platformatic app.
  • Explains how to configure the new Platformatic app.
  • Explains how to create a new React or Vue.js front-end application.
  • Explains how to generate the front-end TypeScript code to consume the Platformatic app REST API.
  • Provides some React and Vue.js components (either of them written in TypeScript) that read, create, and update an entity.
  • Explains how to import the new component in your front-end application.

Create a new Platformatic app

Run this command in your terminal to start the Platformatic creator wizard:

npm create platformatic@latest

This interactive command-line tool will ask you some questions about how you'd +like to set up your new Platformatic project. For this guide, select these options:

- Which kind of project do you want to create?  => DB
- Where would you like to create your project? => quick-start
- Do you want to create default migrations? => Yes
- Do you want to create a plugin? => Yes
- Do you want to use TypeScript? => No
- Do you want to install dependencies? => Yes (this can take a while)
- Do you want to apply the migrations? => Yes
- Do you want to generate types? => Yes
- Do you want to create the github action to deploy this application to Platformatic Cloud dynamic workspace? => No
- Do you want to create the github action to deploy this application to Platformatic Cloud static workspace? => No

Once the wizard is complete, you'll have a Platformatic app project in the +folder quick-start, with example migration files and a plugin script.

info

Make sure you run the npm/yarn/pnpm install command manually if you don't ask the wizard to do it for you.

Configure the new Platformatic app

documentation to create a new Platformatic app. Every Platformatic app uses the "Movie" demo entity and includes +the corresponding table, migrations, and REST API to create, read, update, and delete movies.

Once the new Platformatic app is ready:

  • Set up CORS in platformatic.db.json
{
"$schema": "https://platformatic.dev/schemas/v0.24.0/db",
"server": {
"hostname": "{PLT_SERVER_HOSTNAME}",
"port": "{PORT}",
"logger": {
"level": "{PLT_SERVER_LOGGER_LEVEL}"
},
+ "cors": {
+ "origin": {
+ "regexp": "/*/"
+ }
+ }
},
...
}

You can find more details about the cors configuration here.

  • launch Platformatic through npm start. +Then, the Platformatic app should be available at the http://127.0.0.1:3042/ URL.

Create a new Front-end Application

Refer to the Scaffolding Your First Vite Project +documentation to create a new front-end application, and call it "rest-api-frontend".

info

Please note Vite is suggested only for practical reasons, but the bundler of choice does not make any difference.

If you are using npm 7+ you should run

npm create vite@latest rest-api-frontend -- --template react-ts

and then follow Vite's instructions

Scaffolding project in /Users/noriste/Sites/temp/platformatic/rest-api-frontend...

Done. Now run:

cd rest-api-frontend
npm install
npm run dev

Once done, the front-end application is available at http://localhost:5174/.

Generate the front-end code to consume the Platformatic app REST API

Now that both the Platformatic app and the front-end app are running, go to the front-end codebase and run the Platformatic CLI

cd rest-api-frontend/src
npx platformatic frontend http://127.0.0.1:3042 ts

Refer to the Platformatic CLI frontend command +documentation to know about the available options.

The Platformatic CLI generates

  • api.d.ts: A TypeScript module that includes all the OpenAPI-related types. +Here is part of the generated code
interface GetMoviesRequest {
'limit'?: number;
'offset'?: number;
// ... etc.
}

interface GetMoviesResponseOK {
'id'?: number;
'title': string;
}


// ... etc.

export interface Api {
setBaseUrl(baseUrl: string): void;
getMovies(req: GetMoviesRequest): Promise<Array<GetMoviesResponseOK>>;
createMovie(req: CreateMovieRequest): Promise<CreateMovieResponseOK>;
// ... etc.
}
  • api.ts: A TypeScript module that includes a typed function for every single OpenAPI endpoint. +Here is part of the generated code
import type { Api } from './api-types'

let baseUrl = ''
export function setBaseUrl(newUrl: string) { baseUrl = newUrl };

export const createMovie: Api['createMovie'] = async (request) => {
const response = await fetch(`${baseUrl}/movies/`, {
method:'post',
body: JSON.stringify(request),
headers: {
'Content-Type': 'application/json'
}
})

if (!response.ok) {
throw new Error(await response.text())
}

return await response.json()
}

// etc.

You can add a --name option to the command line to provide a custom name for the generated files.

cd rest-api-frontend/src
npx platformatic frontend --name foobar http://127.0.0.1:3042 ts

will generate foobar.ts and foobar-types.d.ts

React and Vue.js components that read, create, and update an entity

You can copy/paste the following React or Vue.js components that import the code +the Platformatic CLI generated.

Create a new file src/PlatformaticPlayground.tsx and copy/paste the following code.

import { useEffect, useState } from 'react'

// getMovies, createMovie, and updateMovie are all functions automatically generated by Platformatic
// in the `api.ts` module.
import { getMovies, createMovie, updateMovie, setBaseUrl } from './api'

setBaseUrl('http://127.0.0.1:3042') // configure this according to your needs

export function PlatformaticPlayground() {
const [movies, setMovies] = useState<Awaited<ReturnType<typeof getMovies>>>([])
const [newMovie, setNewMovie] = useState<Awaited<ReturnType<typeof createMovie>>>()

async function onCreateMovie() {
const newMovie = await createMovie({ title: 'Harry Potter' })
setNewMovie(newMovie)
}

async function onUpdateMovie() {
if (!newMovie || !newMovie.id) return

const updatedMovie = await updateMovie({ id: newMovie.id, title: 'The Lord of the Rings' })
setNewMovie(updatedMovie)
}

useEffect(() => {
async function fetchMovies() {
const movies = await getMovies({})
setMovies(movies)
}

fetchMovies()
}, [])

return (
<>
<h2>Movies</h2>

{movies.length === 0 ? (
<div>No movies yet</div>
) : (
<ul>
{movies.map((movie) => (
<li key={movie.id}>{movie.title}</li>
))}
</ul>
)}

<button onClick={onCreateMovie}>Create movie</button>
<button onClick={onUpdateMovie}>Update movie</button>

{newMovie && <div>Title: {newMovie.title}</div>}
</>
)
}

Import the new component in your front-end application

You need to import and render the new component in the front-end application.

Change the App.tsx as follows

import { useState } from 'react'
import reactLogo from './assets/react.svg'
import viteLogo from '/vite.svg'
import './App.css'

+import { PlatformaticPlayground } from './PlatformaticPlayground'

function App() {
const [count, setCount] = useState(0)

return (
<>
+ <PlatformaticPlayground />
<div>
<a href="https://vitejs.dev" target="_blank">
<img src={viteLogo} className="logo" alt="Vite logo" />
</a>
<a href="https://react.dev" target="_blank">
<img src={reactLogo} className="logo react" alt="React logo" />
</a>
</div>
<h1>Vite + React</h1>
<div className="card">
<button onClick={() => setCount((count) => count + 1)}>count is {count}</button>
<p>
Edit <code>src/App.tsx</code> and save to test HMR
</p>
</div>
<p className="read-the-docs">Click on the Vite and React logos to learn more</p>
</>
)
}

export default App

Have fun

At the top of the front-end application, the new component requests the movies from the Platformatic app and lists them.

Platformatic frontend guide: listing the movies

Click on "Create movie" to create a new movie called "Harry Potter".

Platformatic frontend guide: creating a movie

Click on "Update movie" to rename "Harry Potter" to "The Lord of the Rings".

Platformatic frontend guide: editing a movie

Reload the front-end application to see the newly renamed "The Lord of the Rings" movie listed.

Platformatic frontend guide: listing the movies +.

+ + + + \ No newline at end of file diff --git a/docs/0.41.2/guides/jwt-auth0/index.html b/docs/0.41.2/guides/jwt-auth0/index.html new file mode 100644 index 00000000000..f61e4a11de1 --- /dev/null +++ b/docs/0.41.2/guides/jwt-auth0/index.html @@ -0,0 +1,21 @@ + + + + + +Configure JWT with Auth0 | Platformatic Open Source Software + + + + + +
+
Version: 0.41.2

Configure JWT with Auth0

Auth0 is a powerful authentication and authorization service provider that can be integrated with Platformatic DB through JSON Web Tokens (JWT) tokens. When a user is authenticated, Auth0 creates a JWT token with all the necessary security information and custom claims (like the X-PLATFORMATIC-ROLE, see User Metadata) and signs the token.

Platformatic DB needs the correct public key to verify the JWT signature. The fastest way is to leverage JWKS, since Auth0 exposes a JWKS endpoint for each tenant. Given an Auth0 tenant's issuer URL, the (public) keys are accessible at ${issuer}/.well-known/jwks.json. For instance, if the issuer is https://dev-xxx.us.auth0.com/, the public keys are accessible at https://dev-xxx.us.auth0.com/.well-known/jwks.json

To configure Platformatic DB authorization to use JWKS with Auth0, set:


...
"authorization": {
"jwt": {
"jwks": {
"allowedDomains": [
"https://dev-xxx.us.auth0.com/"
]
}
},
}
...

danger

Note that specifying allowedDomains is critical to correctly restrict the JWTs, which MUST be issued from one of the allowed domains.

Custom Claim Namespace

In Auth0 there are restrictions on the custom claims that can be set on access tokens. One of these is that custom claims MUST be namespaced, i.e. we cannot have X-PLATFORMATIC-ROLE but we must specify a namespace, e.g.: https://platformatic.dev/X-PLATFORMATIC-ROLE

To map these claims to user metadata removing the namespace, we can specify the namespace in the JWT options:

...
"authorization": {
"jwt": {
"namespace": "https://platformatic.dev/",
"jwks": {
"allowedDomains": [
"https://dev-xxx.us.auth0.com/"
]
}
},
}
...

With this configuration, the https://platformatic.dev/X-PLATFORMATIC-ROLE claim is mapped to X-PLATFORMATIC-ROLE user metadata.

+ + + + \ No newline at end of file diff --git a/docs/0.41.2/guides/migrating-express-app-to-platformatic-service/index.html b/docs/0.41.2/guides/migrating-express-app-to-platformatic-service/index.html new file mode 100644 index 00000000000..a4f94222db4 --- /dev/null +++ b/docs/0.41.2/guides/migrating-express-app-to-platformatic-service/index.html @@ -0,0 +1,17 @@ + + + + + +Migrating an Express app to Platformatic Service | Platformatic Open Source Software + + + + + +
+
Version: 0.41.2

Migrating an Express app to Platformatic Service

Introduction

Our open-source tools are built on top of the modern and flexible Fastify web framework. It provides logging, request validation and a powerful plugin system out-of-the-box, as well as incredible performance.

If you have an existing Express application, migrating it to Fastify could potentially be time consuming, and might not be something that you're able to prioritise right now. You can however still take advantage of Fastify and our open-source tools. In this guide you'll learn how to use the @fastify/express plugin to help you rapidly migrate your existing Express application to use Platformatic Service.

This guide assumes that you have some experience building applications with the Express framework.

Example Express application

For the purpose of this guide, we have a basic example Express application. Although this app has a specific structure, the migration steps covered in this guide can generally be applied to any Express application.

The code for the example Express and migrated Platformatic Service applications is available on GitHub.

Here's the structure of the example Express application:

├── app.js
├── package.json
├── routes
│ └── users.js
└── server.js

It has the following dependencies:

// package.json

"dependencies": {
"express": "^4.18.2"
}

The application has routes in routes/users.js:

// routes/users.js

import express from 'express'

const router = express.Router()

router.use(express.json())

router.post('/', function createUser(request, response, next) {
const newUser = request.body

if (!newUser) {
return next(new Error('Error creating user'))
}

response.status(201).json(newUser)
})

router.get('/:user_id', function getUser(request, response, next) {
const user = {
id: Number(request.params.user_id),
first_name: 'Bobo',
last_name: 'Oso'
}

response.json(user)
})

export const usersRoutes = router

In app.js, we have a factory function that creates a new Express server instance and mounts the routes:

// app.js

import express from 'express'

import { usersRoutes } from './routes/users.js'

export default function buildApp() {
const app = express()

app.use('/users', usersRoutes)

return app
}

And in server.js we're calling the factory function and starting the server listening for HTTP requests:

// server.js

import buildApp from './app.js'

const express = buildApp()

express.listen(3042, () => {
console.log('Example app listening at http://localhost:3042')
})

The routes in your Express application should be mounted on an Express router (or multiple routers if needed). This will allow them to be mounted using @fastify/express when you migrate your app to Platformatic Service.

Creating a new Platformatic Service app

To migrate your Express app to Platformatic Service, create a new Platformatic Service app with:

npm create platformatic@latest

Be sure to select Service as the project type. You should also say yes when you're asked if you want to create the GitHub Actions workflows for deploying your application to Platformatic Cloud.

Once the project has been created, you can delete the example plugins and routes directories.

Using ES modules

If you're using ES modules in the Express application code that you'll be migrating, ensure that there's a type field in package.json set to module:

npm pkg set type=module

Migrate the Express routes

Copy over the routes directory from your Express app.

Install @fastify/express

Install the @fastify/express Fastify plugin to add full Express compatibility to your Platformatic Service app:

npm install @fastify/express

Mounting the Express routes

Create a root Fastify plugin that registers the @fastify/express plugin and loads your Express routes:

// root-plugin.js

import { usersRoutes } from './routes/users.js'

/** @param {import('fastify').FastifyInstance} app */
export default async function (app) {
await app.register(import('@fastify/express'))

app.use('/users', usersRoutes)
}

Configuring the Platformatic Service app

Edit your app's platformatic.service.json to load your root plugin:

// platformatic.service.json

{
...,
"plugins": {
"paths": [{
"path": "./root-plugin.js",
"encapsulate": false
}],
"hotReload": false
},
"watch": false
}

These settings are important when using @fastify/express in a Platformatic Service app:

  • encapsulate — You'll need to disable encapsulation for any Fastify plugin which mounts Express routes. This is due to the way that @fastify/express works.
  • hotReload and watch — You'll need to disable hot reloading and watching for your app, as they don't currently work when using @fastify/express. This is a known issue that we're working to fix.

Wrapping up

You can learn more about building Node.js apps with Platformatic service in the Platformatic Service documentation.

Once you've migrated your Express app to use Platformatic Service with @fastify/express, you might then want to consider fully migrating your Express routes and application code to Fastify. This tutorial shows how you can approach that migration process: How to migrate your app from Express to Fastify (video).

+ + + + \ No newline at end of file diff --git a/docs/0.41.2/guides/migrating-fastify-app-to-platformatic-service/index.html b/docs/0.41.2/guides/migrating-fastify-app-to-platformatic-service/index.html new file mode 100644 index 00000000000..b0d345d1d1b --- /dev/null +++ b/docs/0.41.2/guides/migrating-fastify-app-to-platformatic-service/index.html @@ -0,0 +1,17 @@ + + + + + +Migrating a Fastify app to Platformatic Service | Platformatic Open Source Software + + + + + +
+
Version: 0.41.2

Migrating a Fastify app to Platformatic Service

Introduction

Building production ready Node.js application with Fastify can require a certain amount of boilerplate code. This is a side effect of some of Fastify's technical principles:

  • If it can be a plugin, it should be a plugin — Plugins help with the separation of concerns, they improve testability, and also provide a way to logically organise and structure your applications.
  • Developer choice = developer freedom — Fastify only applies a few strong opinions, in key areas such as logging and validation. The framework features have been designed to give you the freedom to build your applications however you want.
  • You know your needs best — Fastify doesn't make assumptions about what plugins you'll need in your application. As the Fastify plugin ecosystem and the community has grown, a clear group of popular plugin choices has emerged.

Platformatic Service is the natural evolution of the build-it-from-scratch Fastify development experience. It provides a solid foundation for building Node.js applications on top of Fastify, with best practices baked in.

See the Building apps with Platformatic Service section of this guide to learn more about the built-in features.

The good news is that the path to migrate a Fastify application to use Platformatic Service is fairly straightforward. This guide covers some of the things you'll need to know when migrating an application, as well as tips on different migration approaches.

This guide assumes that you have some experience building applications with the Fastify framework. If you'd like to learn more about building web applications with Fastify, we recommend taking a look at:

Example Fastify application

For the purpose of this guide, we have a basic example Fastify application. Although this app has a specific structure, the migration steps covered in this guide can generally be applied to any Fastify application.

The code for the example Fastify and migrated Platformatic Service applications is available on GitHub.

Here's the structure of the example Fastify application:

├── app.js
├── package.json
├── plugins
│   └── data-source.js
├── routes
│   ├── movies.js
│   └── quotes.js
├── server.js
└── test
└── routes.test.js

It has the following dependencies:

// package.json

"dependencies": {
"fastify": "^4.17.0",
"fastify-plugin": "^4.5.0"
}

The application has a plugin that decorates the Fastify server instance, as well as two Fastify plugins which define API routes. Here's the code for them:

// plugins/data-source.js

import fastifyPlugin from 'fastify-plugin'

/** @param {import('fastify').FastifyInstance} app */
async function dataSource (app) {
app.decorate('movies', [
'Jaws',
'Star Wars',
'The Wizard of Oz'
])

app.decorate('quotes', [
'You\'re gonna need a bigger boat.',
'May the Force be with you.',
'Toto, I\'ve got a feeling we\'re not in Kansas anymore.'
])
}

export default fastifyPlugin(dataSource)

fastify-plugin is used to prevent Fastify from creating a new encapsulation context for the plugin. This makes the decorators that are registered in the dataSource plugin available in the route plugins. You can learn about this fundamental Fastify concept in the Fastify Encapsulation documentation.

// routes/movies.js

/** @param {import('fastify').FastifyInstance} app */
export default async function movieRoutes (app) {
app.get('/', async (request, reply) => {
return app.movies
})
}
// routes/quotes.js

/** @param {import('fastify').FastifyInstance} app */
export default async function quotesRoutes (app) {
app.get('/', async (request, reply) => {
return app.quotes
})
}

The route plugins aren't registering anything that needs to be available in other plugins. They have their own encapsulation context and don't need to be wrapped with fastify-plugin.

There's also a buildApp() factory function in app.js, which takes care of creating a new Fastify server instance and registering the plugins and routes:

// app.js

import fastify from 'fastify'

export async function buildApp (options = {}) {
const app = fastify(options)

app.register(import('./plugins/data-source.js'))

app.register(import('./routes/movies.js'), { prefix: '/movies' })
app.register(import('./routes/quotes.js'), { prefix: '/quotes' })

return app
}

And server.js, which calls the buildApp function to create a new Fastify server, and then starts it listening:

// server.js

import { buildApp } from './app.js'

const port = process.env.PORT || 3042
const host = process.env.HOST || '127.0.0.1'

const options = {
logger: {
level: 'info'
}
}

const app = await buildApp(options)

await app.listen({ port, host })

As well as a couple of tests for the API routes:

// tests/routes.test.js

import { test } from 'node:test'
import assert from 'node:assert/strict'

import { buildApp } from '../app.js'

test('Basic API', async (t) => {
const app = await buildApp()

t.after(async () => {
await app.close()
})

await t.test('GET request to /movies route', async () => {
const response = await app.inject({
method: 'GET',
url: '/movies'
})

assert.equal(response.statusCode, 200)
assert.deepEqual(response.json(), [
'Jaws',
'Star Wars',
'The Wizard of Oz'
])
})

await t.test('GET request to /quotes route', async () => {
const response = await app.inject({
method: 'GET',
url: '/quotes'
})

assert.equal(response.statusCode, 200)
assert.deepEqual(response.json(), [
'You\'re gonna need a bigger boat.',
'May the Force be with you.',
'Toto, I\'ve got a feeling we\'re not in Kansas anymore.'
])
})
})

These tests are using the built in Node.js test runner, node:test. They can be run with the command: node --test --test-reporter=spec test/*.test.js.

The @param lines in this application code are JSDoc blocks that import the FastifyInstance type. This allows many code editors to provide auto-suggest, type hinting and type checking for your code.

Creating a new Platformatic Service app

To migrate your Fastify app to Platformatic Service, create a new Platformatic Service app with:

npm create platformatic@latest

Be sure to select Service as the project type. Once the project has been created, you can delete the example plugins and routes directories.

App configuration

The configuration for the Platformatic Service app is stored in platformatic.service.json.

The generated configuration is set up to load plugins from the plugins and routes directories:

// platformatic.service.json

"plugins": {
"paths": [
"./plugins",
"./routes"
]
}

The value for any configuration setting in platformatic.service.json can be replaced with an environment variable by adding a placeholder, for example {PLT_SERVER_LOGGER_LEVEL}. In development, environment variables are automatically loaded by your Platformatic Service app from a .env file in the root directory of your app. In production, you'll typically set these environment variables using a feature provided by your hosting provider.

See the Platformatic Service documentation for Environment variable placeholders to learn more about how this works.

Using ES modules

If you're using ES modules in the Fastify application code that you'll be migrating, ensure that there's a type field in package.json set to module:

npm pkg set type=module

Refactoring Fastify server factories

If your Fastify application has a script with a factory function to create and build up a Fastify server instance, you can refactor it into a Fastify plugin and use it in your Platformatic Service app.

Here are a few things to consider while refactoring it:

  • Move the options you're passing to Fastify when creating a new server instance to the server block in platformatic.service.json. These options will be passed through directly by Platformatic Service when it creates a Fastify server instance.
  • You can create a root plugin to be loaded by your Platformatic Service app, for example: export default async function rootPlugin (app, options) { ... }
  • When you copy the code from your factory function into your root plugin, remove the code which is creating the Fastify server instance.
  • You can configure your Platformatic Service to load the root plugin, for example:
    "plugins": {
    "paths": ["./root-plugin.js"]
    }
  • If you need to pass options to your root plugin, you can do it like this:
    "plugins": {
    "paths": [
    {
    "path": "./root-plugin.js",
    "options": {
    "someOption": true
    }
    }
    ]
    }

Migrating plugins

Copy over the plugins directory from your Fastify app. You shouldn't need to make any modifications for them to work with Platformatic Service.

Disabling plugin encapsulation

Platformatic Service provides a configuration setting which enables you to disable encapsulation for a plugin, or all the plugins within a directory. This will make any decorators or hooks that you set in those plugins available to all other plugins. This removes the need for you to wrap your plugins with fastify-plugin.

To disable encapsulation for all plugins within the plugins directory, you would set your plugins configuration like this in platformatic.service.json:

// platformatic.service.json

"plugins": {
"paths": [
{
"path": "./plugins",
"encapsulate": false
},
"./routes"
]
}

You can learn more about plugin encapsulation in the Fastify Plugins Guide.

Migrating routes

Copy over the routes directory from your Fastify app.

Explicit route paths

If you're registering routes in your Fastify application with full paths, for example /movies, you won't need to make any changes to your route plugins.

Route prefixing with file-system based routing

If you're using the prefix option when registering route plugins in your Fastify application, for example:

app.register(import('./routes/movies.js'), { prefix: '/movies' })

You can achieve the same result with Platformatic Service by using file-system based routing. With the following directory and file structure:

routes/
├── movies
│   └── index.js
└── quotes
└── index.js

Assuming that both of the route files register a / route, these are the route paths that will be registered in your Platformatic Service app:

/movies
/quotes

With the example Fastify application, this would mean copying the route files over to these places in the Platformatic Service app:

routes/movies.js -> routes/movies/index.js
routes/quotes.js -> routes/quotes/index.js

How does this work? Plugins are loaded with the @fastify/autoload Fastify plugin. The dirNameRoutePrefix plugin option for @fastify/autoload is enabled by default. This means that "routes will be automatically prefixed with the subdirectory name in an autoloaded directory".

If you'd prefer not to use file-system based routing with Platformatic Service, you can add prefixes to the paths for the routes themselves (see Explicit route paths).

Adapting existing usage of @fastify/autoload

If you're using @fastify/autoload in your Fastify application, there are a couple of approaches you can take when migrating the app to Platformatic Service:

  • Configure plugins in your Platformatic Service app's platformatic.service.json. It will then take care of loading your routes and plugins for you with @fastify/autoload (configuration documentation).
  • You can continue to use @fastify/autoload directly with a little refactoring. See the tips in the Refactoring Fastify server factories section.

Migrating tests

You'll generally use the Platformatic CLI to start your Platformatic Service app (npx platformatic start). However for testing, you can use the programmatic API provided by Platformatic Service. This allows you to load your app in your test scripts and then run tests against it.

If you copy over the tests from your existing Fastify app, they will typically only require a small amount of refactoring to work with Platformatic Service.

Replacing your Fastify server factory function

The example Fastify app has a buildApp() factory function which creates a Fastify server instance. The import line for that function can be removed from tests/routes.test.js:

// tests/routes.test.js

import { buildApp } from '../app.js'

And replaced with an import of the buildServer() function from @platformatic/service:

// tests/routes.test.js

import { buildServer } from '@platformatic/service'

You can then load your Platformatic Service app like this:


const app = await buildServer('./platformatic.service.json')

Disabling server logging in your tests

If you have logging enabled for your Platformatic Service app, you'll probably want to disable the logging in your tests to remove noise from the output that you receive when you run your tests.

Instead of passing the path to your app's configuration to buildServer(), you can import the app configuration and disable logging:

// tests/routes.test.js

import serviceConfig from '../platformatic.service.json' assert { type: 'json' }

serviceConfig.server.logger = false

Then pass that serviceConfig configuration object to the buildServer() function:

// tests/routes.test.js

const app = await buildServer(serviceConfig)

Import assertions — the assert { type: 'json' } syntax — are not a stable feature of the JavaScript language, so you'll receive warning messages from Node.js when running your tests. You can disable these warnings by passing the --no-warnings flag to node.

Building apps with Platformatic Service

Because Platformatic Service is built on top of the Fastify framework, you're able to use the full functionality of the Fastify framework in your Platformatic Service app. This includes:

  • Fast, structured logging, provided by Pino
  • Request validation with JSON Schema and Ajv (other validation libraries are supported too)
  • Hooks, which allow fine grained control over when code is run during the request/response lifecycle.
  • Decorators, which allow you to customize core Fastify objects and write more modular code.

Platformatic Service also provides many other features that are built on top of Fastify.

Application features

All Platformatic Service features are fully configurable via platformatic.service.json.

Development features

  • Hot reloading — Your server will automatically reload in development as you develop features.
  • Write your plugins in JavaScript or TypeScript — TypeScript support is provided out-of-the-box and supports hot reloading.
  • Pretty printed logs — Making it easier to understand and debug your application during development.

See the Platformatic Service Configuration documentation for all of the features which can be configured.

Next steps

The documentation for Platformatic Service is a helpful reference when building a Platformatic Service app.

Watch: Understand the parts of a Platformatic app

You want to be confident that you understand how your applications work. In this video you'll learn about the parts that make up a Platformatic application, what each part does, and how they fit together.

Our series of Platformatic How-to videos can help get you up and running building apps with Platformatic open-source tools.

Got questions or need help migrating your Fastify app to use Platformatic Service? Drop by our Discord server and we'll be happy to help you.

+ + + + \ No newline at end of file diff --git a/docs/0.41.2/guides/monitoring/index.html b/docs/0.41.2/guides/monitoring/index.html new file mode 100644 index 00000000000..752e2dea79d --- /dev/null +++ b/docs/0.41.2/guides/monitoring/index.html @@ -0,0 +1,24 @@ + + + + + +Monitoring with Prometheus and Grafana | Platformatic Open Source Software + + + + + +
+
Version: 0.41.2

Monitoring with Prometheus and Grafana

Prometheus is an open source systems monitoring and alerting toolkit. It's a time series database that collects metrics from configured targets at given intervals, evaluates rule expressions, displays the results, and can trigger alerts if some condition is observed to be true. Grafana is an open source visualization and analytics software.

It's a pretty common solution to use Prometheus to collect and store monitoring data, and Grafana to visualize it.

Platformatic can be configured to expose Prometheus metrics:

...
"metrics": {
"port": 9091,
"auth": {
"username": "platformatic",
"password": "mysecret"
}
}
...

In this case, we are exposing the metrics on port 9091 (defaults to 9090), and we are using basic authentication to protect the endpoint. +We can also specify the IP address to bind to (defaults to 0.0.0.0). +Note that the metrics port is not the default in this configuration. This is because if you want to test the integration running both Prometheus and Platformatic on the same host, Prometheus starts on 9090 port too. +All the configuration settings are optional. To use the default settings, set "metrics": true. See the configuration reference for more details.

caution

Use environment variable placeholders in your Platformatic DB configuration file to avoid exposing credentials.

Prometheus Configuration

This is an example of a minimal Prometheus configuration to scrape the metrics from Platformatic:

global:
scrape_interval: 15s
scrape_timeout: 10s
evaluation_interval: 1m
scrape_configs:
- job_name: 'platformatic'
scrape_interval: 2s
metrics_path: /metrics
scheme: http
basic_auth:
username: platformatic
password: mysecret
static_configs:
- targets: ['192.168.69.195:9091']
labels:
group: 'platformatic'

We specify a target configuring the IP address and the port where Platformatic is running, and we specify the username and password to use for basic authentication. The metrics path is the one used by Platformatic. The ip address is not a loopback address so this will work even with Prometheus running in docker on the same host (see below), please change it to your host ip.

To test this configuration, we can run Prometheus locally using docker and docker-compose, so please be sure to have both correctly installed. +Save the above configuration in a file named ./prometheus/prometheus.yml and create a docker-compose.yml:

version: "3.7"

services:
prometheus:
image: prom/prometheus:latest
volumes:
- prometheus_data:/prometheus
- ./prometheus/prometheus.yml:/etc/prometheus/prometheus.yml
command:
- '--config.file=/etc/prometheus/prometheus.yml'
ports:
- '9090:9090'

volumes:
prometheus_data: {}

Then run docker-compose up -d and open http://localhost:9090 in your browser. You should see the Prometheus dashboard, and you can also query the metrics, e.g. {group="platformatic"}. See Prometheus docs for more information on querying and metrics.

Grafana Configuration

Let's see how we can configure Grafana to chart some Platformatics metrics from Prometheus. +Change the docker-compose.yml to add a grafana service:

version: "3.7"
services:

prometheus:
image: prom/prometheus:latest
volumes:
- prometheus_data:/prometheus
- ./prometheus/prometheus.yml:/etc/prometheus/prometheus.yml
command:
- '--config.file=/etc/prometheus/prometheus.yml'
ports:
- '9090:9090'

grafana:
image: grafana/grafana:latest
volumes:
- grafana_data:/var/lib/grafana
environment:
- GF_SECURITY_ADMIN_PASSWORD=pleasechangeme
depends_on:
- prometheus
ports:
- '3000:3000'

volumes:
prometheus_data: {}
grafana_data: {}

In Grafana, select Configuration -> Data Sources -> Add Data Source, and select Prometheus. +In the URL field, specify the URL of the Prometheus server, e.g. http://prometheus:9090 (the name of the service in the docker-compose file), then Save & Test.

Now we can create a dashboard and add panels to it. Select the Prometheus data source, and add queries. You should see the metrics exposed by Platformatic.

It's also possible to import pre-configured dashboards, like this one from Grafana.com.

+ + + + \ No newline at end of file diff --git a/docs/0.41.2/guides/packaging-an-application-as-a-module/index.html b/docs/0.41.2/guides/packaging-an-application-as-a-module/index.html new file mode 100644 index 00000000000..36718b61fbc --- /dev/null +++ b/docs/0.41.2/guides/packaging-an-application-as-a-module/index.html @@ -0,0 +1,27 @@ + + + + + +Packaging a Platformatic Application as a module | Platformatic Open Source Software + + + + + +
+
Version: 0.41.2

Packaging a Platformatic Application as a module

Platformatic Service and Platformatic DB +offer a good starting point to create new applications. However, most developers or organizations might want to +create reusable services or applications built on top of Platformatic. +This is useful to publish the application on the public npm registry (or a private one!), including building your own CLI, +or to create a specialized template for your organization to allow for centralized bugfixes and updates.

This process is the same one we use to maintain Platformatic DB and Platformatic Composer on top of Platformatic Service.

Creating a custom Service

We are creating the module foo.js as follows:

const { schema, platformaticService } = require('@platformatic/service')

/** @type {import('fastify').FastifyPluginAsync<{}>} */
async function foo (app, opts) {
const text = app.platformatic.config.foo.text
app.get('/foo', async (request, reply) => {
return text
})

await platformaticService(app, opts)
}

foo.configType = 'foo'

// break Fastify encapsulation
foo[Symbol.for('skip-override')] = true

// The schema for our configuration file
foo.schema = {
$id: 'https://example.com/schemas/foo.json',
title: 'Foo Service',
type: 'object',
properties: {
server: schema.server,
plugins: schema.plugins,
metrics: schema.metrics,
watch: {
anyOf: [schema.watch, {
type: 'boolean'
}, {
type: 'string'
}]
},
$schema: {
type: 'string'
},
module: {
type: 'string'
},
foo: {
type: 'object',
properties: {
text: {
type: 'string'
}
},
required: ['text']
}
},
additionalProperties: false,
required: ['server']
}

// The configuration for the ConfigManager
foo.configManagerConfig = {
schema: foo.schema,
envWhitelist: ['PORT', 'HOSTNAME'],
allowToWatch: ['.env'],
schemaOptions: {
useDefaults: true,
coerceTypes: true,
allErrors: true,
strict: false
}
}

module.exports = foo

Note that the $id property of the schema identifies the module in our system, +allowing us to retrieve the schema correctly. +It is recommended, but not required, that the JSON schema is actually +published in this location. Doing so allows tooling such as the VSCode +language server to provide autocompletion.

In this example, the schema adds a custom top-level foo property +that users can use to configure this specific module.

ESM is also supported.

Consuming a custom application

Consuming foo.js is simple. We can create a platformatic.json file as follows:

{
"$schema": "https://example.com/schemas/foo.json",
"module": "./foo",
"server": {
"port": 0,
"hostname": "127.0.0.1"
},
"foo": {
"text": "Hello World"
}
}

Note that we must specify both the $schema property and module. The module can also be any module published on npm and installed via your package manager.

Building your own CLI

It is possible to build your own CLI with the following cli.mjs file:

import foo from './foo.js'
import { start } from '@platformatic/service'
import { printAndExitLoadConfigError } from '@platformatic/config'

await start(foo, process.argv.splice(2)).catch(printAndExitLoadConfigError)

This will also load platformatic.foo.json files.

+ + + + \ No newline at end of file diff --git a/docs/0.41.2/guides/prisma/index.html b/docs/0.41.2/guides/prisma/index.html new file mode 100644 index 00000000000..0cc93dad5ac --- /dev/null +++ b/docs/0.41.2/guides/prisma/index.html @@ -0,0 +1,17 @@ + + + + + +Integrate Prisma with Platformatic DB | Platformatic Open Source Software + + + + + +
+
Version: 0.41.2

Integrate Prisma with Platformatic DB

Prisma is an open-source ORM for Node.js and TypeScript. It is used as an alternative to writing SQL, or using another database access tool such as SQL query builders (like knex.js) or ORMs (like TypeORM and Sequelize). Prisma currently supports PostgreSQL, MySQL, SQL Server, SQLite, MongoDB, and CockroachDB.

Prisma can be used with JavaScript or TypeScript, and provides a level of type-safety that goes beyond the guarantees made by other ORMs in the TypeScript ecosystem. You can find an in-depth comparison of Prisma against other ORMs here.

If you want to get a quick overview of how Prisma works, you can follow the Quickstart or read the Introduction in the Prisma documentation.

How Prisma can improve your workflow with Platformatic DB

While Platformatic speeds up development of your REST and GraphQL APIs, Prisma can complement the workflow in several ways:

  1. Provides an intuitive data modeling language
  2. Provides auto-generated and customizable SQL migrations
  3. Provides type-safety and auto-completion for your database queries

You can learn more about why Prisma and Platformatic are a great match in this article.

Prerequisites

To follow along with this guide, you will need to have the following:

Setup Prisma

Install the Prisma CLI and the db-diff development dependencies in your project:

npm install --save-dev prisma @ruheni/db-diff

Next, initialize Prisma in your project

npx prisma init

This command does the following:

  • Creates a new directory called prisma which contains a file called schema.prisma. This file defines your database connection and the Prisma Client generator.
  • Creates a .env file at the root of your project if it doesn't exist. This defines your environment variables (used for your database connection).

You can specify your preferred database provider using the --datasource-provider flag, followed by the name of the provider:

npx prisma init --datasource-provider postgresql # or sqlite, mysql, sqlserver, cockroachdb

Prisma uses the DATABASE_URL environment variable to connect to your database to sync your database and Prisma schema. It also uses the variable to connect to your database to run your Prisma Client queries.

If you're using PostgreSQL, MySQL, SQL Server, or CockroachDB, ensure that the DATABASE_URL used by Prisma is the same as the one used by Platformatic DB project. If you're using SQLite, refer to the Using Prisma with SQLite section.

If you have an existing project, refer to the Adding Prisma to an existing Platformatic DB project section. If you're adding Prisma to a new project, refer to the Adding Prisma to a new project.

Adding Prisma to an existing project

If you have an existing Platformatic DB project, you can introspect your database and generate the data model in your Prisma schema with the following command:

npx prisma db pull

The command will introspect your database and generate the data model

Next, add the @@ignore attribute to the versions model to exclude it from the Prisma Client API:

model versions {
version BigInt @id
name String?
md5 String?
run_at DateTime? @db.Timestamptz(6)

+ @@ignore
}

To learn how you can evolve your database schema, you can jump to the Evolving your database schema section.

Adding Prisma to a new project

Define a Post model with the following fields at the end of your schema.prisma file:

prisma/schema.prisma
model Post {
id Int @id @default(autoincrement())
title String
content String?
published Boolean @default(false)
viewCount Int @default(0)
createdAt DateTime @default(now())

@@map("posts")
}

The snippet above defines a Post model with the following fields and properties:

  • id: An auto-incrementing integer that will be the primary key for the model.
  • title: A non-nullable String field.
  • content: A nullable String field.
  • published: A Boolean field with a default value of false.
  • viewCount: An Int field with a default value of 0.
  • createdAt: A DateTime field with a timestamp of when the value is created as its default value.

By default, Prisma maps the model name and its format to the table name — which is also used in Prisma Client. Platformatic DB uses a snake casing and pluralized table names to map your table names to the generated API. The @@map() attribute in the Prisma schema allows you to define the name and format of your table names to be used in your database. You can also use the @map() attribute to define the format for field names to be used in your database. Refer to the Foreign keys and table names naming conventions section to learn how you can automate formatting foreign keys and table names.

Next, run the following command to generate an up and down migration:

npx db-diff

The previous command will generate both an up and down migration based on your schema. The generated migration is stored in your ./migrations directory. If you are currently using a different path to store the migration, you can provide the --migrations-dir flag followed by the path.

You can then apply the generated migration using the Platformatic DB CLI:

npx platformatic db migrations apply

Platformatic uses Postgrator to run migrations. Postgrator creates a table in the database called versions to track the applied migrations. Since the versions table is not yet captured in the Prisma schema, run the following command to introspect the database and populate it with the missing model:

npx prisma db pull

Introspecting the database to populate the model prevents including the versions table in the generated down migrations.

Your Prisma schema should now contain a versions model that is similar to this one (it will vary depending on the database system you're using):

model Post {
id Int @id @default(autoincrement())
title String
content String?
published Boolean @default(false)
viewCount Int @default(0)
createdAt DateTime @default(now())

@@map("posts")
}

+model versions {
+ version BigInt @id
+ name String?
+ md5 String?
+ run_at DateTime? @db.Timestamptz(6)
+}

Add the @@ignore attribute function to the model to exclude it from the Prisma Client API:

model versions {
version BigInt @id
name String?
md5 String?
run_at DateTime? @db.Timestamptz(6)

+ @@ignore
}

Evolving your database schema

Update the data model in your Prisma schema by adding a model or a field:

// based on the schema in the "Adding Prisma to a new project" section
+model User {
+ id Int @id @default(autoincrement())
+ email String @unique
+ name String?
+ posts Post[]
+
+ @@map("users")
+}

model Post {
id Int @id @default(autoincrement())
createdAt DateTime @default(now())
title String
content String?
published Boolean @default(false)
viewCount Int @default(0)
+ author User? @relation(fields: [authorId], references: [id])
+ authorId Int? @map("author_id")

@@map("posts")
}

Next, use the @ruheni/db-diff CLI tool to generate up and down migrations:

npx db-diff

This command will generate up and down migrations based off of your Prisma schema. If you are currently using a different path to store the migration, you can provide the --migrations-dir flag followed by the path.

Next, apply the generated migration using the Platformatic CLI:

npx platformatic db migrations apply

And you're done!

Using Prisma Client in your plugins

Plugins allow you to add custom functionality to your REST and GraphQL API. Refer to the Add Custom Functionality guide to learn more about how you can add custom functionality.

danger

Prisma Client usage with Platformatic is currently only supported in Node v18

You can use Prisma Client to interact with your database in your plugin.

To get started, run the following command:

npx prisma generate

The above command installs the @prisma/client in your project and generates a Prisma Client based off of your Prisma schema.

Install @sabinthedev/fastify-prisma fastify plugin. The plugin takes care of shutting down database connections and makes Prisma Client available as a Fastify plugin.

npm install @sabinthedev/fastify-prisma

Register the plugin and extend your REST API:

// 1.
const prismaPlugin = require("@sabinthedev/fastify-prisma")

module.exports = async (app) => {
app.log.info('plugin loaded')

// 2.
app.register(prismaPlugin)

/**
* Plugin logic
*/
// 3.
app.put('/post/:id/views', async (req, reply) => {

const { id } = req.params

// 4.
const post = await app.prisma.post.update({
where: {
id: Number(id)
},
data: {
viewCount: {
increment: 1
}
}
})

// 5.
return reply.send(post)
})
}

The snippet does the following:

  1. Imports the plugin
  2. Registers the @sabinthedev/fastify-prisma
  3. Defines the endpoint for incrementing the views of a post
  4. Makes a query to the database on the Post model to increment a post's view count
  5. Returns the updated post on success

If you would like to extend your GraphQL API, extend the schema and define the corresponding resolver:

plugin.js
// ./plugin.js
const prismaPlugin = require("@sabinthedev/fastify-prisma")

module.exports = async (app) => {
app.log.info('plugin loaded')

app.graphql.extendSchema(`
extend type Mutation {
incrementPostViewCount(id: ID): Post
}
`)

app.graphql.defineResolvers({
Mutation: {
incrementPostViewCount: async (_, { id }) => {
const post = await prisma.post.update({
where: {
id: Number(id)
},
data: {
viewCount: {
increment: 1
}
}
})

if (!post) throw new Error(`Post with id:${id} was not found`)
return post
}
}
})
}

Start the server:

npx platformatic db start

The query should now be included in your GraphQL schema.

You can also use the Prisma Client in your REST API endpoints.

Workarounds

Using Prisma with SQLite

Currently, Prisma doesn't resolve the file path of a SQLite database the same way as Platformatic does.

If your database is at the root of the project, create a new environment variable that Prisma will use called PRISMA_DATABASE_URL:

# .env
DATABASE_URL="sqlite://db.sqlite"
PRISMA_DATABASE_URL="file:../db.sqlite"

Next, update the url value in the datasource block in your Prisma schema with the updated value:

prisma/schema.prisma
// ./prisma/schema.prisma
datasource db {
provider = "sqlite"
url = env("PRISMA_DATABASE_URL")
}

Running migrations should now work smoothly and the path will be resolved correctly.

Foreign keys, field, and table names naming conventions

Foreign key names should use underscores, e.g. author_id, for Platformatic DB to correctly map relations. You can use the @map("") attribute to define the names of your foreign keys and field names to be defined in the database.

Table names should be mapped to use the naming convention expected by Platformatic DB e.g. @@map("recipes") (the Prisma convention is Recipe, which corresponds with the model name).

You can use prisma-case-format to enforce your own database conventions, i.e., pascal, camel, and snake casing.

Learn more

If you would like to learn more about Prisma, be sure to check out the Prisma docs.

+ + + + \ No newline at end of file diff --git a/docs/0.41.2/guides/securing-platformatic-db/index.html b/docs/0.41.2/guides/securing-platformatic-db/index.html new file mode 100644 index 00000000000..8bd4fca5cee --- /dev/null +++ b/docs/0.41.2/guides/securing-platformatic-db/index.html @@ -0,0 +1,31 @@ + + + + + +Securing Platformatic DB with Authorization | Platformatic Open Source Software + + + + + +
+
Version: 0.41.2

Securing Platformatic DB with Authorization

Introduction

Authorization in Platformatic DB is role-based. User authentication and the +assignment of roles must be handled by an external authentication service. +Take a look at the reference documentation for Authorization.

The goal of this simple guide is to protect an API built with Platformatic DB +with the use of a shared secret, that we call adminSecret. We want to prevent +any user that is not an admin to access the data.

The use of an adminSecret is a simplistic way of securing a system. +It is a crude way for limiting access and not suitable for production systems, +as the risk of leaking the secret is high in case of a security breach. +A production friendly way would be to issue a machine-to-machine JSON Web Token, +ideally with an asymmetric key. Alternatively, you can defer to an external +service via a Web Hook.

Please refer to our guide to set up Auth0 for more information +on JSON Web Tokens.

Block access to all entities, allow admins

The following configuration will block all anonymous users (e.g. each user without a known role) +to access every entity:

{
...
"authorization": {
"adminSecret": "replaceWithSomethingRandomAndSecure"
}
}

The data will still be available if the X-PLATFORMATIC-ADMIN-SECRET HTTP header +is specified when making HTTP calls, like so:

curl -H 'X-PLATFORMATIC-ADMIN-SECRET: replaceWithSomethingRandomAndSecure' http://127.0.0.1:3042/pages
info

Configuring JWT or Web Hooks will have the same result of configuring an admin secret.

Authorization rules

Rules can be provided based on entity and role in order to restrict access and provide fine grained access. +To make an admin only query and save the page table / page entity using adminSecret this structure should be used in the platformatic.db configuration file:

  ...
"authorization": {
"adminSecret": "easy",
"rules": [{
"entity": "movie",
"role": "platformatic-admin",
"find": true,
"save": true,
"delete": false
}
]
}
info

Note that the role of an admin user from adminSecret strategy is platformatic-admin by default.

Read-only access to anonymous users

The following configuration will allow all anonymous users (e.g. each user without a known role) +to access the pages table / page entity in Read-only mode:

{
...
"authorization": {
"adminSecret": "replaceWithSomethingRandomAndSecure",
"rules": [{
"role": "anonymous",
"entity": "page",
"find": true,
"save": false,
"delete": false
}]
}
}

Note that we set find as true to allow the access, while the other options are false.

Work in Progress

This guide is a Work-In-Progress. Let us know what other common authorization use cases we should cover.

+ + + + \ No newline at end of file diff --git a/docs/0.41.2/guides/seed-a-database/index.html b/docs/0.41.2/guides/seed-a-database/index.html new file mode 100644 index 00000000000..6afea76cfcc --- /dev/null +++ b/docs/0.41.2/guides/seed-a-database/index.html @@ -0,0 +1,21 @@ + + + + + +Seed a Database | Platformatic Open Source Software + + + + + +
+
Version: 0.41.2

Seed a Database

A database is as useful as the data that it contains: a fresh, empty database +isn't always the best starting point. We can add a few rows from our migrations +using SQL, but we might need to use JavaScript from time to time.

The platformatic db seed command allows us to run a +script that will populate — or "seed" — our database.

Example

Our seed script should export a Function that accepts an argument: +an instance of @platformatic/sql-mapper.

seed.js
'use strict'

module.exports = async function ({ entities, db, sql }) {
await entities.graph.save({ input: { name: 'Hello' } })
await db.query(sql`
INSERT INTO graphs (name) VALUES ('Hello 2');
`)
}

We can then run the seed script with the Platformatic CLI:

npx platformatic db seed seed.js
+ + + + \ No newline at end of file diff --git a/docs/0.41.2/guides/telemetry/index.html b/docs/0.41.2/guides/telemetry/index.html new file mode 100644 index 00000000000..5d9e25a6d6a --- /dev/null +++ b/docs/0.41.2/guides/telemetry/index.html @@ -0,0 +1,21 @@ + + + + + +Telemetry with Jaeger | Platformatic Open Source Software + + + + + +
+
Version: 0.41.2

Telemetry with Jaeger

Introduction

Platformatic supports Open Telemetry integration. This allows you to send telemetry data to one of the OTLP compatible servers (see here) or to a Zipkin server. Let's show this with Jaeger.

Jaeger setup

The quickest way is to use docker:

docker run -d --name jaeger \
-e COLLECTOR_OTLP_ENABLED=true \
-p 16686:16686 \
-p 4317:4317 \
-p 4318:4318 \
jaegertracing/all-in-one:latest

Check that the server is running by opening http://localhost:16686/ in your browser.

Platformatic setup

We'll test this with a Platformatic Composer that proxies requests to a Platformatic Service, which in turn invokes a Platformatic DB Service. +In this way we show that the telemetry is propagated from the Composer throughout the services and collected correctly. +Let's set up all these components:

Platformatic DB Service

Create a folder for DB and cd into it:

mkdir test-db
cd test-db

Then create a db in the folder using npx create-platformatic@latest:

npx create-platformatic@latest

To make it simple, use sqlite and create/apply the default migrations. This DB Service is exposed on port 5042:


➜ npx create-platformatic@latest

Hello user, welcome to Platformatic 0.32.0!
Let's start by creating a new project.
? Which kind of project do you want to create? DB
? Where would you like to create your project? .
? What database do you want to use? SQLite
? Do you want to use the connection string "sqlite://./db.sqlite"? Confirm
? Do you want to create default migrations? yes
? Do you want to create a plugin? no
? Do you want to use TypeScript? no
? What port do you want to use? 5042
[15:40:46] INFO: Configuration file platformatic.db.json successfully created.
[15:40:46] INFO: Environment file .env successfully created.
[15:40:46] INFO: Migrations folder migrations successfully created.
[15:40:46] INFO: Migration file 001.do.sql successfully created.
[15:40:46] INFO: Migration file 001.undo.sql successfully created.
[15:40:46] INFO: Plugin file created at plugin.js
? Do you want to run npm install? no
? Do you want to apply migrations? yes
...done!
? Do you want to generate types? no
? Do you want to create the github action to deploy this application to Platformatic Cloud dynamic workspace? no
? Do you want to create the github action to deploy this application to Platformatic Cloud static workspace? no

All done! Please open the project directory and check the README.
Will test this in one example with a Platformatic Composer that proxy requests to a Platformatic Service, which in turn invokes a Platformatic DB.

Open the platformatic.db.json file and add the telemetry configuration:

  "telemetry": {
"serviceName": "test-db",
"exporter": {
"type": "otlp",
"options": {
"url": "http://localhost:4318/v1/traces"
}
}
}

Finally, start the DB service:

npx platformatic db start

Platformatic Service

Create at the same level of test-db another folder for Service and cd into it:

mkdir test-service
cd test-service

Then create a service on the 5043 port in the folder using npx create-platformatic@latest:

➜ npx create-platformatic@latest

Hello user, welcome to Platformatic 0.32.0!
Let's start by creating a new project.
? Which kind of project do you want to create? Service
? Where would you like to create your project? .
? Do you want to run npm install? no
? Do you want to use TypeScript? no
? What port do you want to use? 5043
[15:55:35] INFO: Configuration file platformatic.service.json successfully created.
[15:55:35] INFO: Environment file .env successfully created.
[15:55:35] INFO: Plugins folder "plugins" successfully created.
[15:55:35] INFO: Routes folder "routes" successfully created.
? Do you want to create the github action to deploy this application to Platformatic Cloud dynamic workspace? no
? Do you want to create the github action to deploy this application to Platformatic Cloud static workspace? no

Open the platformatic.service.json file and add the following telemetry configuration (it's exactly the same as DB, but with a different serviceName)

  "telemetry": {
"serviceName": "test-service",
"exporter": {
"type": "otlp",
"options": {
"url": "http://localhost:4318/v1/traces"
}
}
}

We want this service to invoke the DB service, so we need to add a client for test-db to it:

npx platformatic client http://127.0.0.1:5042 js --name movies

Check platformatic.service.json to see that the client has been added (PLT_MOVIES_URL is defined in .env):

    "clients": [
{
"schema": "movies/movies.openapi.json",
"name": "movies",
"type": "openapi",
"url": "{PLT_MOVIES_URL}"
}
]

Now open routes/root.js and add the following:

  fastify.get('/movies-length', async (request, reply) => {
const movies = await request.movies.getMovies()
return { length: movies.length }
})

This code calls movies to get all the movies and returns the length of the array.

Finally, start the service:

npx platformatic service start

Platformatic Composer

At the same level as test-db and test-service, create another folder for Composer and cd into it:

mkdir test-composer
cd test-composer

Then create a composer on the 5044 port in the folder using npx create-platformatic@latest:

➜ npx create-platformatic@latest

Hello marcopiraccini, welcome to Platformatic 0.32.0!
Let's start by creating a new project.
? Which kind of project do you want to create? Composer
? Where would you like to create your project? .
? What port do you want to use? 5044
? Do you want to run npm install? no
[16:05:28] INFO: Configuration file platformatic.composer.json successfully created.
[16:05:28] INFO: Environment file .env successfully created.
? Do you want to create the github action to deploy this application to Platformatic Cloud dynamic workspace? no
? Do you want to create the github action to deploy this application to Platformatic Cloud static workspace? no

All done! Please open the project directory and check the README.

Open platformatic.composer.js and change it to the following:

{
"$schema": "https://platformatic.dev/schemas/v0.32.0/composer",
"server": {
"hostname": "{PLT_SERVER_HOSTNAME}",
"port": "{PORT}",
"logger": {
"level": "{PLT_SERVER_LOGGER_LEVEL}"
}
},
"composer": {
"services": [
{
"id": "example",
"origin": "http://127.0.0.1:5043",
"openapi": {
"url": "/documentation/json"
}
}
],
"refreshTimeout": 3000
},
"telemetry": {
"serviceName": "test-composer",
"exporter": {
"type": "otlp",
"options": {
"url": "http://localhost:4318/v1/traces"
}
}
},
"watch": true
}

Note that we just added test-service as the origin of the proxied service and added the usual telemetry configuration, with a different serviceName.

Finally, start the composer:

npx platformatic composer start

Run the Test

Check that the composer is exposing movies-length opening: http://127.0.0.1:5044/documentation/

You should see: +image

To add some data, we can POST directly to the DB service (port 5042):

curl -X POST -H "Content-Type: application/json" -d '{"title":"The Matrix"}' http://127.0.0.1:5042/movies 
curl -X POST -H "Content-Type: application/json" -d '{"title":"The Matrix Reloaded"}' http://127.0.0.1:5042/movies

Now, let's check that the composer (port 5044) is working:

curl http://127.0.0.1:5044/movies-length

If the composer is working correctly, you should see:

{"length":2}

However, the main interest of this example is to show how to use the Platformatic Telemetry, so let's check it. +Open the Jaeger UI at http://localhost:16686/ and you should see something like this:

image

Select on the left the test-composer service and the GET /movies-length operation, click on "Find traces" and you should see something like this:

image

You can then click on the trace and see the details:

image

Note that every time a request is received or a client call is made, a new span is started. So we have:

  • One span for the request received by the test-composer
  • One span for the client call to test-service
  • One span for the request received by test-service
  • One span for the client call to test-db
  • One span for the request received by test-db

All these spans are linked together, so you can see the whole trace.

What if you want to use Zipkin?

Starting from this example, it's also possible to run the same test using Zipkin. To do so, you need to start the Zipkin server:

docker run -d -p 9411:9411 openzipkin/zipkin

Then, you need to change the telemetry configuration in all the platformatic.*.json to the following (only the exporter object is different)

  "telemetry": {
(...)
"exporter": {
"type": "zipkin",
"options": {
"url": "http://127.0.0.1:9411/api/v2/spans"
}
}
}

The Zipkin UI is available at http://localhost:9411/

+ + + + \ No newline at end of file diff --git a/docs/0.41.2/platformatic-cloud/deploy-database-neon/index.html b/docs/0.41.2/platformatic-cloud/deploy-database-neon/index.html new file mode 100644 index 00000000000..95169294526 --- /dev/null +++ b/docs/0.41.2/platformatic-cloud/deploy-database-neon/index.html @@ -0,0 +1,32 @@ + + + + + +Deploy a PostgreSQL database with Neon | Platformatic Open Source Software + + + + + +
+
Version: 0.41.2

Deploy a PostgreSQL database with Neon

Neon offers multi-cloud fully managed +Postgres with a generous free tier. They separated storage and +compute to offer autoscaling, branching, and bottomless storage. +It offers a great environment for creating database preview +environments for your Platformatic DB +applications.

This guide shows you how to integrate Neon branch deployments with your +Platformatic app's GitHub Actions workflows. It assumes you have already +followed the Quick Start Guide.

Create a project on Neon

To set up an account with Neon, open their website, sign up and create a +new project.

Take note of the following configuration setting values:

  • The connection string for your main branch database, to be stored in a NEON_DB_URL_PRODUCTION secret
  • The Project ID (available under the project Settings), to be stored in a NEON_PROJECT_ID secret
  • Your API key (available by clicking on your user icon > Account > Developer settings), to be stored under NEON_API_KEY

You can learn more about Neon API keys in their Manage API Keys documentation.

Configure Github Environments and Secrets

Now you need to set the configuration values listed above as +repository secrets +on your project's GitHub repository. +Learn how to use environments for deployment in GitHub's documentation.

Configure the GitHub Environments for your repository to have:

  • production secrets, available only to the main branch:
    • NEON_DB_URL_PRODUCTION
  • previews secrets available to all branches:
    • NEON_PROJECT_ID
    • NEON_API_KEY

Configure the main branch workflow

Replace the contents of your app's workflow for static workspace deployment:

.github/workflows/platformatic-static-workspace-deploy.yml
name: Deploy Platformatic application to the cloud
on:
push:
branches:
- main
paths-ignore:
- 'docs/**'
- '**.md'

jobs:
build_and_deploy:
environment:
name: production
permissions:
contents: read
runs-on: ubuntu-latest
steps:
- name: Checkout application project repository
uses: actions/checkout@v4
- name: npm install --omit=dev
run: npm install --omit=dev
- name: Deploy project
uses: platformatic/onestep@latest
with:
github_token: ${{ secrets.GITHUB_TOKEN }}
platformatic_workspace_id: <YOUR_STATIC_WORKSPACE_ID>
platformatic_workspace_key: ${{ secrets.PLATFORMATIC_STATIC_WORKSPACE_API_KEY }}
platformatic_config_path: ./platformatic.db.json
secrets: DATABASE_URL
env:
DATABASE_URL: ${{ secrets.NEON_DB_URL_PRODUCTION }}
PLT_SERVER_LOGGER_LEVEL: info
PORT: 3042
PLT_SERVER_HOSTNAME: 127.0.0.1

Replace <YOUR_STATIC_WORKSPACE_ID> with the workspace ID that you previously had in this file.

When your app is deployed to the static workspace it will now be configured to connect to the +main branch database for your Neon project.

Configure the preview environment workflow

Neon allows up to 10 database branches on their free tier. You can automatically create a new +database branch when a pull request is opened, and then automatically remove it when the pull +request is merged.

GitHub Action to create a preview environment

Replace the contents of your app's workflow for dynamic workspace deployment:

.github/workflows/platformatic-dynamic-workspace-deploy.yml
name: Deploy to Platformatic cloud
on:
pull_request:
paths-ignore:
- 'docs/**'
- '**.md'

# This allows a subsequently queued workflow run to interrupt previous runs
concurrency:
group: "${{ github.workflow }} @ ${{ github.event.pull_request.head.label || github.head_ref || github.ref }}"
cancel-in-progress: true

jobs:
build_and_deploy:
runs-on: ubuntu-latest
environment:
name: development
steps:
- name: Checkout application project repository
uses: actions/checkout@v4
- name: npm install --omit=dev
run: npm install --omit=dev
- name: Get PR number
id: get_pull_number
run: |
pull_number=$(jq --raw-output .pull_request.number "$GITHUB_EVENT_PATH")
echo "pull_number=${pull_number}" >> $GITHUB_OUTPUT
echo $pull_number
- uses: neondatabase/create-branch-action@v4
with:
project_id: ${{ secrets.NEON_PROJECT_ID }}
branch_name: pr-${{ steps.get_pull_number.outputs.pull_number }}
api_key: ${{ secrets.NEON_API_KEY }}
id: create-branch
- name: Deploy project
uses: platformatic/onestep@latest
with:
github_token: ${{ secrets.GITHUB_TOKEN }}
platformatic_workspace_id: ${{ secrets.PLATFORMATIC_DYNAMIC_WORKSPACE_ID }}
platformatic_workspace_key: ${{ secrets.PLATFORMATIC_DYNAMIC_WORKSPACE_KEY }}
platformatic_config_path: ./platformatic.db.json
env:
DATABASE_URL: ${{ steps.create-branch.outputs.db_url }}
PLT_SERVER_LOGGER_LEVEL: info
PORT: 3042
PLT_SERVER_HOSTNAME: 127.0.0.1

Replace <YOUR_DYNAMIC_WORKSPACE_ID> with the workspace ID that you previously had in this file.

Configure preview environment cleanup

After a pull request to the main branch is merged, you should remove the matching database branch.

Create a new file, .github/workflows/cleanup-neon-branch-db.yml, and copy and paste in the following +workflow configuration:

.github/workflows/cleanup-neon-branch-db.yml
name: Cleanup Neon Database Branch
on:
push:
branches:
- 'main'
jobs:
delete-branch:
environment:
name: development
permissions: write-all
runs-on: ubuntu-latest
steps:
- name: Get PR info
id: get-pr-info
uses: actions-ecosystem/action-get-merged-pull-request@v1.0.1
with:
github_token: ${{secrets.GITHUB_TOKEN}}
- run: |
echo ${{ steps.get-pr-info.outputs.number}}
- name: Delete Neon Branch
if: ${{ steps.get-pr-info.outputs.number }}
uses: neondatabase/delete-branch-action@v3
with:
project_id: ${{ secrets.NEON_PROJECT_ID }}
branch: pr-${{ steps.get-pr-info.outputs.number }}
api_key: ${{ secrets.NEON_API_KEY }}

Deployment

To deploy these changes to your app:

  1. Create a Git branch locally (git checkout -b <BRANCH_NAME>)
  2. Commit your changes and push them to GitHub
  3. Open a pull request on GitHub - a branch will automatically be created for your Neon database and a preview app will be deployed to Platformatic Cloud (in your app's dynamic workspace).
  4. Merge the pull request - the Neon database branch will be automatically deleted and your app will be deployed to Platformatic Cloud (in your app's static workspace).
+ + + + \ No newline at end of file diff --git a/docs/0.41.2/platformatic-cloud/pricing/index.html b/docs/0.41.2/platformatic-cloud/pricing/index.html new file mode 100644 index 00000000000..c727d2753cc --- /dev/null +++ b/docs/0.41.2/platformatic-cloud/pricing/index.html @@ -0,0 +1,23 @@ + + + + + +Platformatic Cloud Pricing | Platformatic Open Source Software + + + + + +
+
Version: 0.41.2

Platformatic Cloud Pricing

Find the plan that works best for you!

FreeBasicAdvancedPro
Pricing$0$4.99$22.45$49.99
Slots01512
CNAME-truetruetrue
Always On-truetruetrue

FAQ

What is a slot?

One slot is equal to one compute unit. The free plan has no always-on +machines and they will be stopped while not in use.

What is a workspace?

A workspace is the security boundary of your deployment. You will use +the same credentials to deploy to one.

A workspace can be either static or dynamic. A static workspace always deploys to the same domain, while in a dynamic workspace each deployment will have its own domain. The latter is useful to provide pull request previews.

Can I change or upgrade my plan after I start using Platformatic?

Plans can be changed or upgraded at any time.

What does it mean I can set my own CNAME?

Free applications only get a *.deploy.space domain name to access their application. All other plans can set it to a domain of their choosing.

+ + + + \ No newline at end of file diff --git a/docs/0.41.2/platformatic-cloud/quick-start-guide/index.html b/docs/0.41.2/platformatic-cloud/quick-start-guide/index.html new file mode 100644 index 00000000000..980553c0784 --- /dev/null +++ b/docs/0.41.2/platformatic-cloud/quick-start-guide/index.html @@ -0,0 +1,45 @@ + + + + + +Cloud Quick Start Guide | Platformatic Open Source Software + + + + + +
+
Version: 0.41.2

Cloud Quick Start Guide

This guide shows you how to create and deploy an application to +Platformatic Cloud.

Prerequisites

To follow along with this guide you'll need to have these things installed:

You will also need to have a GitHub account.

Log in to Platformatic Cloud

Go to the Platformatic Cloud website and click on the +Continue with GitHub button. You'll be transferred to a GitHub page that +asks you to Authorize Platformatic Cloud. To continue, click on the +Authorize platformatic button.

Screenshot of Continue with GitHub button

On the Platformatic Cloud Service Agreements page, check the boxes and +click the Continue button. You'll then be redirected to your Cloud Dashboard page.

Create a Cloud app

Screenshot of an empty Apps page

Click the Create an app now button on your Cloud Dashboard page.

Enter quick-start-app as your application name. Click the Create Application button.

Create a static app workspace

Enter production as the name for your workspace. Then click on the Create Workspace button.

On the next page you'll see the Workspace ID and API key for your app workspace. +Copy them and store them somewhere secure for future reference, for example in a password manager app. +The API key will be used to deploy your app to the workspace that you've just created.

Click on the Back to dashboard button.

Create a dynamic app workspace

On your Cloud Dashboard, click on your app, then click on Create Workspace in the Workspaces +sidebar.

Screenshot of the create app workspace screen

The Dynamic Workspace option will be automatically enabled as you have already created a +static workspace. Dynamic workspaces can be used to deploy preview applications for GitHub +pull requests.

Enter development as the name for your workspace, then click on the Create Workspace button. +Copy the Workspace ID and API key and store them somewhere secure.

Create a GitHub repository

Go to the Create a new repository page on GitHub. +Enter quick-start-app as the Repository name for your new repository. +Click on the Add a README file checkbox and click the Create repository +button.

Add the workspace API keys as repository secrets

Go to the Settings tab on your app's GitHub repository. Click into the +Secrets and variables > Actions section and add the following secrets:

NameSecret
PLATFORMATIC_STATIC_WORKSPACE_IDYour app's static workspace ID
PLATFORMATIC_STATIC_WORKSPACE_API_KEYYour app's static workspace API key
PLATFORMATIC_DYNAMIC_WORKSPACE_IDYour app's dynamic workspace ID
PLATFORMATIC_DYNAMIC_WORKSPACE_API_KEYYour app's dynamic workspace API key

Click on the New repository secret button to add a secret.

tip

You can also use the GitHub CLI to set secrets on your GitHub repository, for example:

gh secret set \
--app actions \
--env-file <FILENAME_OF_ENV_FILE_WITH_SECRETS> \
--repos <YOUR_GITHUB_USERNAME>/<REPO_NAME>

Create a new Platformatic app

In your terminal, use Git to clone your repository from GitHub. For example:

git clone git@github.com:username/quick-start-app.git
tip

See the GitHub documentation for help with +Cloning a repository.

Now change into the project directory:

cd quick-start-app

Now run this command to start the Platformatic creator wizard:

npm create platformatic@latest

This interactive command-line tool will ask you some questions about how you'd +like to set up your new Platformatic app. For this guide, select these options:

- Which kind of project do you want to create?     => DB
- Where would you like to create your project? => .
- Do you want to create default migrations? => yes
- Do you want to create a plugin? => yes
- Do you want to use TypeScript? => no
- Do you want to overwrite the existing README.md? => yes
- Do you want to run npm install? => yes (this can take a while)
- Do you want to apply the migrations? => yes
- Do you want to generate types? => yes
- Do you want to create the github action to deploy this application to Platformatic Cloud dynamic workspace? => yes
- Do you want to create the github action to deploy this application to Platformatic Cloud static workspace? => yes

Copy and paste your dynamic and static workspace IDs when prompted by the creator wizard.

Once the wizard is complete, you'll have a Platformatic app project in the +quick-start-app directory, with example migration files and a plugin script.

Deploy the app

In your project directory, commit your application with Git:

git add .

git commit -m "Add Platformatic app"

Now push your changes up to GitHub:

git push origin main

On the GitHub repository page in your browser click on the Actions tab. +You should now see the Platformatic Cloud deployment workflow running.

Test the deployed app

Screenshot of a static app workspace that has had an app deployed to it

Once the GitHub Actions deployment workflow has completed, go to the production workspace +for your app in Platformatic Cloud. Click on the link for the Entry Point. You should now +see the Platformatic DB app home page.

Click on the OpenAPI Documentation link to try out your app's REST API using the Swagger UI.

Screenshot of Swagger UI for a Platformatic DB app

Preview pull request changes

When a pull request is opened on your project's GitHub repository, a preview app will automatically +be deployed to your app's dynamic workspace.

To see a preview app in action, create a new Git branch:

git checkout -b add-hello-endpoint

Then open up your app's plugin.js file in your code editor. Add the following code inside +the existing empty function:

app.get('/hello', async function(request, reply) {
return { hello: 'from Platformatic Cloud' }
})

Save the changes, then commit and push them up to GitHub:

git add plugin.js

git commit -m "Add hello endpoint"

git push -u origin add-hello-endpoint

Now create a pull request for your changes on GitHub. At the bottom of the +pull request page you'll see that a deployment has been triggered to your +app's dynamic workspace.

Screenshot of checks on a GitHub pull request

Once the deployment has completed, a comment will appear on your pull request +with a link to the preview app.

Screenshot of a deployed preview app comment on a GitHub pull request

Click on the Application URL link. If you add /hello on to the URL, +you should receive a response from the endpoint that you just added to +your application.

Screenshot of a JSON response from an API endpoint

+ + + + \ No newline at end of file diff --git a/docs/0.41.2/reference/cli/index.html b/docs/0.41.2/reference/cli/index.html new file mode 100644 index 00000000000..870d3b4a8a4 --- /dev/null +++ b/docs/0.41.2/reference/cli/index.html @@ -0,0 +1,44 @@ + + + + + +Platformatic CLI | Platformatic Open Source Software + + + + + +
+
Version: 0.41.2

Platformatic CLI

Installation and usage

Install the Platformatic CLI as a dependency for your project:

npm install platformatic

Once it's installed you can run it with:

npx platformatic
info

The platformatic package can be installed globally, but installing it as a +project dependency ensures that everyone working on the project is using the +same version of the Platformatic CLI.

Commands

The Platformatic CLI provides the following commands:

help

Welcome to Platformatic. Available commands are:

  • help - display this message.
  • help <command> - show more information about a command.
  • db - start Platformatic DB; type platformatic db help to know more.
  • service - start Platformatic Service; type platformatic service help to know more.
  • upgrade - upgrade the Platformatic configuration to the latest version.
  • gh - create a new gh action for Platformatic deployments.
  • deploy - deploy a Platformatic application to the cloud.
  • runtime - start Platformatic Runtime; type platformatic runtime help to know more.
  • start - start a Platformatic application.
  • frontend - create frontend code to consume the REST APIs.

compile

Compile all typescript plugins.

  $ platformatic compile

This command will compile the TypeScript plugins for each platformatic application.

deploy

Deploys an application to the Platformatic Cloud.

 $ platformatic deploy

Options:

  • -t, --type static/dynamic - The type of the workspace.
  • -c, --config FILE - Specify a configuration file to use.
  • -k, --keys FILE - Specify a path to the workspace keys file.
  • -l --label TEXT - The deploy label. Only for dynamic workspaces.
  • -e --env FILE - The environment file to use. Default: ".env"
  • -s --secrets FILE - The secrets file to use. Default: ".secrets.env"
  • --workspace-id uuid - The workspace id where the application will be deployed.
  • --workspace-key TEXT - The workspace key where the application will be deployed.
  1. To deploy a Platformatic application to the cloud, you should go to the Platformatic cloud dashboard and create a workspace.
  2. Once you have created a workspace, retrieve your workspace id and key from the workspace settings page. Optionally, you can download the provided workspace env file, which you can use with the --keys option.

ℹ️

When deploying an application to a dynamic workspace, specify the deploy --label option. You can find it on your cloud dashboard or you can specify a new one.

gh

Creates a gh action to deploy platformatic services on workspaces.

 $ platformatic gh -t dynamic

Options:

  • -w --workspace ID - The workspace ID where the service will be deployed.
  • -t, --type static/dynamic - The type of the workspace. Defaults to static.
  • -c, --config FILE - Specify a configuration file to use.
  • -b, --build - Build the service before deploying (npm run build).

If not specified, the configuration will be loaded from any of the following, in the current directory.

  • platformatic.db.json, or
  • platformatic.db.yml, or
  • platformatic.db.tml, or
  • platformatic.service.json, or
  • platformatic.service.yml, or
  • platformatic.service.tml

You can find more details about the configuration format here:

start

Start a Platformatic application with the following command:

$ platformatic start

Options:

  • -c, --config <path> - Path to the configuration file.
  • --inspect[=[host:]port] - Start the Node.js debugger. host defaults to '127.0.0.1'. port defaults to 9229. Use caution when binding to a public host:port combination.
  • --inspect-brk[=[host:]port] - Start the Node.js debugger and block until a client has attached. host defaults to '127.0.0.1'. port defaults to 9229. Use caution when binding to a public host:port combination.

upgrade

Upgrade the Platformatic schema configuration to the latest version.

 $ platformatic upgrade

Options:

  • -c, --config FILE - Specify a schema configuration file to use.

If not specified, the configuration will be loaded from any of the following, in the current directory.

  • platformatic.db.json, or
  • platformatic.db.yml, or
  • platformatic.db.tml, or
  • platformatic.service.json, or
  • platformatic.service.yml, or
  • platformatic.service.tml

You can find more details about the configuration format here:

client

platformatic client <command>

help

Create a Fastify plugin that exposes a client for a remote OpenAPI or GraphQL API.

To create a client for a remote OpenAPI API, you can use the following command:

$ platformatic client http://example.com/to/schema/file -n myclient

To create a client for a remote Graphql API, you can use the following command:

$ platformatic client http://example.com/graphql -n myclient

Instead of a URL, you can also use a local file:

$ platformatic client path/to/schema -n myclient

This will create a Fastify plugin that exposes a client for the remote API in a folder myclient +and a file named myclient.js inside it.

If platformatic config file is specified, it will be edited and a clients section will be added. +Then, in any part of your Platformatic application you can use the client.

You can use the client in your application in Javascript, calling a GraphQL endpoint:

module.exports = async function (app, opts) {
app.post('/', async (request, reply) => {
const res = await app.myclient.graphql({
query: 'query { hello }'
})
return res
})
}

or in Typescript, calling an OpenAPI endpoint:

import { FastifyInstance } from 'fastify'
/// <reference path="./myclient" />

export default async function (app: FastifyInstance) {
app.get('/', async () => {
return app.myclient.get({})
})
}

Options:

  • -c, --config <path> - Path to the configuration file.
  • -n, --name <name> - Name of the client.
  • -f, --folder <name> - Name of the plugin folder, defaults to --name value.
  • -t, --typescript - Generate the client plugin in TypeScript.
  • --full-response - Client will return full response object rather than just the body.
  • --full-request - Client will be called with all parameters wrapped in body, headers and query properties.
  • --full - Enables both --full-request and --full-response overriding them.
  • --optional-headers <headers> - Comma separated string of headers that will be marked as optional in the type file

composer

platformatic composer <command>

help

Available commands:

  • help - show this help message.
  • help <command> - shows more information about a command.
  • start - start the server.
  • openapi schemas fetch - fetch OpenAPI schemas from services.

openapi schemas fetch

Fetch OpenAPI schemas from remote services to use in your Platformatic project.

  $ platformatic composer openapi schemas fetch

It will fetch all the schemas from the remote services and store them by path +set in the platformatic.composer.json file. If the path is not set, it will +skip fetching the schema.

start

Start the Platformatic Composer server with the following command:

 $ platformatic composer start

You will need a configuration file. Here is an example to get you started, +save the following as platformatic.composer.json:

  {
"server": {
"hostname": "127.0.0.1",
"port": 0,
"logger": {
"level": "info"
}
},
"composer": {
"services": [
{
"id": "service1",
"origin": "http://127.0.0.1:3051",
"openapi": {
"url": "/documentation/json"
}
},
{
"id": "service2",
"origin": "http://127.0.0.1:3052",
"openapi": {
"file": "./schemas/service2.openapi.json"
}
}
],
"refreshTimeout": 1000
}
}

By sending the SIGUSR2 signal, the server can be reloaded.

Options:

  • -c, --config FILE - Specify a configuration file to use.

If not specified, the configuration will be loaded from any of the following, in the current directory.

  • platformatic.composer.json, or
  • platformatic.composer.yml, or
  • platformatic.composer.tml

You can find more details about the configuration format here:

db

platformatic db <command>

compile

Compile typescript plugins.

  $ platformatic db compile

As a result of executing this command, the Platformatic DB will compile typescript +plugins in the outDir directory.

If not specified, the configuration will be loaded from any of the following, in the current directory.

  • platformatic.db.json, or
  • platformatic.db.yml, or
  • platformatic.db.tml

You can find more details about the configuration format here:

help

Available commands:

  • help - show this help message.
  • help <command> - shows more information about a command.
  • start - start the server.
  • compile - compile typescript plugins.
  • seed - run a seed file.
  • types - generate typescript types for entities.
  • schema - generate and print api schema.
  • migrations create - generate do and undo migration files.
  • migrations apply - apply migration files.

migrations apply

Apply all configured migrations to the database:

  $ platformatic db migrations apply

The migrations will be applied in the order they are specified in the +folder defined in the configuration file. If you want to apply a specific migration, +you can use the --to option:

  $ platformatic db migrations apply --to 001

Here is an example migration:

  CREATE TABLE graphs (
id SERIAL PRIMARY KEY,
name TEXT
);

You can always rollback to a specific migration with:

  $ platformatic db migrations apply --to VERSION

Use 000 to reset to the initial state.

Options:

  • -c, --config <path> - Path to the configuration file.
  • -t, --to <version> - Migrate to a specific version.

If not specified, the configuration will be loaded from any of the following, in the current directory.

  • platformatic.db.json, or
  • platformatic.db.yml, or
  • platformatic.db.tml

You can find more details about the configuration format here:

migrations create

Create next migration files.

  $ platformatic db migrations create

It will generate do and undo sql files in the migrations folder. The name of the +files will be the next migration number.

  $ platformatic db migrations create --name "create_users_table"

Options:

  • -c, --config <path> - Path to the configuration file.

If not specified, the configuration will be loaded from any of the following, in the current directory.

  • platformatic.db.json, or
  • platformatic.db.yml, or
  • platformatic.db.tml

You can find more details about the configuration format here:

migrations

Available commands:

  • migrations create - generate do and undo migration files.
  • migrations apply - apply migration files.

schema

Update the config schema file:

  • schema config - update the JSON schema config available on platformatic.db.schema.json

Your configuration on platformatic.db.json has a schema defined to improve the developer experience and avoid mistakes when updating the configuration of Platformatic DB. When you run platformatic db init, a new JSON $schema property is added in platformatic.db.schema.json. This allows your IDE to add suggestions (e.g. mandatory/missing fields, types, default values) when you open the config in platformatic.db.json. By running platformatic db schema config you can update your schema so that it matches the latest changes available in your config.

Generate a schema from the database and prints it to standard output:

  • schema graphql - generate the GraphQL schema
  • schema openapi - generate the OpenAPI schema

Options:

  • -c, --config FILE - Specify a configuration file to use.

If not specified, the configuration will be loaded from any of the following, in the current directory.

  • platformatic.db.json, or
  • platformatic.db.yml, or
  • platformatic.db.tml

You can find more details about the configuration format here:

seed

Load a seed into the database. This is a convenience method that loads a JavaScript file and configures @platformatic/sql-mapper to connect to the database specified in the configuration file.

Here is an example of a seed file:

  'use strict'

module.exports = async function ({ entities, db, sql }) {
await entities.graph.save({ input: { name: 'Hello' } })
await db.query(sql`
INSERT INTO graphs (name) VALUES ('Hello 2');
`)
}

You can run this using the seed command:

  $ platformatic db seed seed.js

Options:

  • --config - Path to the configuration file.

If not specified, the configuration will be loaded from any of the following, in the current directory.

  • platformatic.db.json, or
  • platformatic.db.yml, or
  • platformatic.db.tml

You can find more details about the configuration format here:

start

Start the Platformatic DB server with the following command:

 $ platformatic db start

You will need a configuration file. Here is an example to get you started, +save the following as platformatic.db.json:

  {
"server": {
"hostname": "127.0.0.1",
"port": 0,
"logger": {
"level": "info"
}
},
"db": {
"connectionString": "sqlite://./db"
},
"migrations": {
"dir": "./migrations"
}
}

Remember to create a migration, run the db help migrate command to know more.

All outstanding migrations will be applied to the database unless the +migrations.autoApply configuration option is set to false.

By sending the SIGUSR2 signal, the server can be reloaded.

Options:

  • -c, --config FILE - Specify a configuration file to use.

If not specified, the configuration will be loaded from any of the following, in the current directory.

  • platformatic.db.json, or
  • platformatic.db.yml, or
  • platformatic.db.tml

You can find more details about the configuration format here:

types

Generate typescript types for your entities from the database.

  $ platformatic db types

As a result of executing this command, the Platformatic DB will generate a types +folder with a typescript file for each database entity. It will also generate a +global.d.ts file that injects the types into the Application instance.

In order to add type support to your plugins, you need to install some additional +dependencies. To do this, copy and run an npm install command with dependencies +that "platformatic db types" will ask you.

Here is an example of a platformatic plugin.js with jsdoc support. +You can use it to add autocomplete to your code.

/// <reference path="./global.d.ts" />
'use strict'

/** @param {import('fastify').FastifyInstance} app */
module.exports = async function (app) {
app.get('/movie', async () => {
const movies = await app.platformatic.entities.movie.find({
where: { title: { eq: 'The Hitchhiker\'s Guide to the Galaxy' } }
})
return movies[0].id
})
}

If not specified, the configuration will be loaded from any of the following, in the current directory.

  • platformatic.db.json, or
  • platformatic.db.yml, or
  • platformatic.db.tml

You can find more details about the configuration format here:

service

platformatic service <command>

compile

Compile typescript plugins.

  $ platformatic service compile

As a result of executing this command, Platformatic Service will compile typescript +plugins in the outDir directory.

If not specified, the configuration will be loaded from any of the following, in the current directory.

  • platformatic.service.json, or
  • platformatic.service.yml, or
  • platformatic.service.tml

You can find more details about the configuration format here:

help

Available commands:

  • help - show this help message.
  • help <command> - shows more information about a command.
  • start - start the server.
  • schema config - generate the schema configuration file.

schema

Update the config schema file:

  • schema config - update the JSON schema config available on platformatic.service.schema.json

Your configuration on platformatic.service.json has a schema defined to improve the developer experience and avoid mistakes when updating the configuration of Platformatic Service. +When you initialize a new Platformatic service (f.e. running npm create platformatic@latest), a new JSON $schema property is added in the platformatic.service.json config. This can allow your IDE to add suggestions (f.e. mandatory/missing fields, types, default values) by opening the config in platformatic.service.json. +Running platformatic service schema config you can update your schema so that it matches well the latest changes available on your config.

start

Start the Platformatic Service with the following command:

 $ platformatic service start

You will need a configuration file. Here is an example to get you started, +save the following as platformatic.service.json:

{
"server": {
"hostname": "127.0.0.1",
"port": 0,
"logger": {
"level": "info"
}
},
"plugin": {
"path": "./plugin.js"
}
}

frontend

platformatic frontend <url> <language>

Create frontend code to consume the REST APIs of a Platformatic application.

From the directory you want the frontend code to be generated (typically <YOUR_FRONTEND_APP_DIRECTORY>/src/) run -

npx platformatic frontend http://127.0.0.1:3042 ts

ℹ️

Where http://127.0.0.1:3042 must be replaced with your Platformatic application endpoint, and the language can either be ts or js. When the command is run, the Platformatic CLI generates -

  • api.d.ts - A TypeScript module that includes all the OpenAPI-related types.
  • api.ts or api.js - A module that includes a function for every single REST endpoint.

If you use the --name option it will create custom file names.

npx platformatic frontend http://127.0.0.1:3042 ts --name foobar

Will create foobar.ts and foobar-types.d.ts

Refer to the dedicated guide where the full process of generating and consuming the frontend code is described.

In case of problems, please check that:

  • The Platformatic app URL is valid.
  • The Platformatic app that the URL belongs to must be up and running.
  • OpenAPI must be enabled (db.openapi in your platformatic.db.json is not set to false). You can find more details about the db configuration format here.
  • CORS must be managed in your Platformatic app (server.cors.origin.regexp in your platformatic.db.json is set to /*/, for instance). You can find more details about the cors configuration here.

runtime

platformatic runtime <command>

compile

Compile all typescript plugins for all services.

  $ platformatic runtime compile

This command will compile the TypeScript +plugins for each service registered in the runtime.

help

Available commands:

  • help - show this help message.
  • help <command> - shows more information about a command.
  • start - start the application.

start

Start the Platformatic Runtime with the following command:

 $ platformatic runtime start

start

Start a Platformatic application with the following command:

$ platformatic start

Options:

  • -c, --config <path> - Path to the configuration file.
  • --inspect[=[host:]port] - Start the Node.js debugger. host defaults to '127.0.0.1'. port defaults to 9229. Use caution when binding to a public host:port combination.
  • --inspect-brk[=[host:]port] - Start the Node.js debugger and block until a client has attached. host defaults to '127.0.0.1'. port defaults to 9229. Use caution when binding to a public host:port combination.
+ + + + \ No newline at end of file diff --git a/docs/0.41.2/reference/client/frontend/index.html b/docs/0.41.2/reference/client/frontend/index.html new file mode 100644 index 00000000000..128a0d1071b --- /dev/null +++ b/docs/0.41.2/reference/client/frontend/index.html @@ -0,0 +1,17 @@ + + + + + +Frontend client | Platformatic Open Source Software + + + + + +
+
Version: 0.41.2

Frontend client

Create implementation and type files that expose a client for a remote OpenAPI server, which uses fetch and can run in any browser.

To create a client for a remote OpenAPI API, you can use the following command:

$ platformatic frontend http://example.com/to/schema/file <language> --name <clientname>

where <language> can be either js or ts.

This will create two files clientname.js (or clientname.ts) and clientname-types.d.ts for types.

clientname by default is api

Usage

The implementation generated by the tool exports all the named operation found and a factory object.

Named operations

import { setBaseUrl, getMovies } from './api.js'

setBaseUrl('http://my-server-url.com') // modifies the global `baseUrl` variable

const movies = await getMovies({})
console.log(movies)

Factory

The factory object is called build and can be used like this

import build from './api.js'

const client = build('http://my-server-url.com')

const movies = await client.getMovies({})
console.log(movies)

You can use both named operations and the factory in the same file. They can work on different hosts, so the factory does not use the global setBaseUrl function.

Generated Code

The type file will look like this

export interface GetMoviesRequest {
'limit'?: number;
'offset'?: number;
// ... all other options
}

interface GetMoviesResponseOK {
'id': number;
'title': string;
}
export interface Api {
setBaseUrl(newUrl: string) : void;
getMovies(req: GetMoviesRequest): Promise<Array<GetMoviesResponseOK>>;
// ... all operations listed here
}

type PlatformaticFrontendClient = Omit<Api, 'setBaseUrl'>
export default function build(url: string): PlatformaticFrontendClient

The javascript implementation will look like this

let baseUrl = ''
/** @type {import('./api-types.d.ts').Api['setBaseUrl']} */
export const setBaseUrl = (newUrl) => { baseUrl = newUrl }

/** @type {import('./api-types.d.ts').Api['getMovies']} */
export const getMovies = async (request) => {
return await _getMovies(baseUrl, request)
}
async function _createMovie (url, request) {
const response = await fetch(`${url}/movies/`, {
method:'post',
body: JSON.stringify(request),
headers: {
'Content-Type': 'application/json'
}
})

if (!response.ok) {
throw new Error(await response.text())
}

return await response.json()
}

/** @type {import('./api-types.d.ts').Api['createMovie']} */
export const createMovie = async (request) => {
return await _createMovie(baseUrl, request)
}
// ...

export default function build (url) {
return {
getMovies: _getMovies.bind(url, ...arguments),
// ...
}
}

The typescript implementation will look like this

import type { Api } from './api-types'
import * as Types from './api-types'

let baseUrl = ''
export const setBaseUrl = (newUrl: string) : void => { baseUrl = newUrl }

const _getMovies = async (url: string, request: Types.GetMoviesRequest) => {
const response = await fetch(`${url}/movies/?${new URLSearchParams(Object.entries(request || {})).toString()}`)

if (!response.ok) {
throw new Error(await response.text())
}

return await response.json()
}

export const getMovies: Api['getMovies'] = async (request: Types.GetMoviesRequest) => {
return await _getMovies(baseUrl, request)
}
// ...
export default function build (url) {
return {
getMovies: _getMovies.bind(url, ...arguments),
// ...
}
}
+ + + + \ No newline at end of file diff --git a/docs/0.41.2/reference/client/introduction/index.html b/docs/0.41.2/reference/client/introduction/index.html new file mode 100644 index 00000000000..b2354f2b760 --- /dev/null +++ b/docs/0.41.2/reference/client/introduction/index.html @@ -0,0 +1,34 @@ + + + + + +Platformatic Client | Platformatic Open Source Software + + + + + +
+
Version: 0.41.2

Platformatic Client

Create a Fastify plugin that exposes a client for a remote OpenAPI or GraphQL API.

To create a client for a remote OpenAPI API, you can use the following command:

$ platformatic client http://example.com/to/schema/file --name myclient

To create a client for a remote GraphQL API, you can use the following command:

$ platformatic client http://example.com/graphql --name myclient

Usage with Platformatic Service or Platformatic DB

If you run the generator in a Platformatic application, it will +automatically extend it to load your client by editing the configuration file +and adding a clients section. +Then, in any part of your Platformatic application you can use the client.

You can use the client in your application in Javascript, calling a GraphQL endpoint:

// Use a typescript reference to set up autocompletion
// and explore the generated APIs.

/// <reference path="./myclient" />

/** @type {import('fastify').FastifyPluginAsync<{}>} */
module.exports = async function (app, opts) {
app.post('/', async (request, reply) => {
const res = await app.myclient.graphql({
query: 'query { movies { title } }'
})
return res
})
}

or in Typescript, calling an OpenAPI endpoint:

import { FastifyInstance } from 'fastify'
/// <reference path="./myclient" />

export default async function (app: FastifyInstance) {
app.get('/', async () => {
return app.myclient.get({})
})
}

The client configuration in the platformatic.db.json and platformatic.service.json would look like:

{
"clients": [{
"schema": "./myclient/myclient.openapi.json" // or ./myclient/myclient.schema.graphl
"name": "myclient",
"type": "openapi" // or graphql
"url": "{ PLT_MYCLIENT_URL }"
}]
}

Note that the generator would also have updated the .env and .env.sample files if they exist.

Generating a client for a service running within Platformatic Runtime

Platformatic Runtime allows you to create a network of services that are not exposed. +To create a client to invoke one of those services from another, run:

$ platformatic client --name <clientname> --runtime <serviceId>

Where <clientname> is the name of the client and <serviceId> is the id of the given service +(which corresponds in the basic case to the folder name of that service). +The client generated is identical to the one in the previous section.

Note that this command looks for a platformatic.runtime.json in a parent directory.

Example

As an example, consider a network of three microservices:

  • somber-chariot, an instance of Platformatic DB;
  • languid-noblemen, an instance of Platformatic Service;
  • pricey-paesant, an instance of Platformatic Composer, which is also the runtime entrypoint.

From within the languid-noblemen folder, we can run:

$ platformatic client --name chariot --runtime somber-chariot

The client configuration in the platformatic.db.json and platformatic.service.json would look like:

{
"clients": [{
"path": "./chariot",
"serviceId": "somber-chariot"
}]
}

Even if the client is generated from an HTTP endpoint, it is possible to add a serviceId property to each client object shown above. +This is not required, but if using the Platformatic Runtime, the serviceId +property will be used to identify the service dependency.

Types Generator

The types for the client are automatically generated for both OpenAPI and GraphQL schemas.

You can generate only the types with the --types-only flag.

For example

$ platformatic client http://exmaple.com/to/schema/file --name myclient --types-only

Will create the single myclient.d.ts file in current directory

OpenAPI

We provide a fully typed experience for OpenAPI, Typing both the request and response for +each individual OpenAPI operation.

Consider this example:

// Omitting all the individual Request and Response payloads for brevity

interface Client {
getMovies(req: GetMoviesRequest): Promise<Array<GetMoviesResponse>>;
createMovie(req: CreateMovieRequest): Promise<CreateMovieResponse>;
updateMovies(req: UpdateMoviesRequest): Promise<Array<UpdateMoviesResponse>>;
getMovieById(req: GetMovieByIdRequest): Promise<GetMovieByIdResponse>;
updateMovie(req: UpdateMovieRequest): Promise<UpdateMovieResponse>;
updateMovie(req: UpdateMovieRequest): Promise<UpdateMovieResponse>;
deleteMovies(req: DeleteMoviesRequest): Promise<DeleteMoviesResponse>;
getQuotesForMovie(req: GetQuotesForMovieRequest): Promise<Array<GetQuotesForMovieResponse>>;
getQuotes(req: GetQuotesRequest): Promise<Array<GetQuotesResponse>>;
createQuote(req: CreateQuoteRequest): Promise<CreateQuoteResponse>;
updateQuotes(req: UpdateQuotesRequest): Promise<Array<UpdateQuotesResponse>>;
getQuoteById(req: GetQuoteByIdRequest): Promise<GetQuoteByIdResponse>;
updateQuote(req: UpdateQuoteRequest): Promise<UpdateQuoteResponse>;
updateQuote(req: UpdateQuoteRequest): Promise<UpdateQuoteResponse>;
deleteQuotes(req: DeleteQuotesRequest): Promise<DeleteQuotesResponse>;
getMovieForQuote(req: GetMovieForQuoteRequest): Promise<GetMovieForQuoteResponse>;
}

type ClientPlugin = FastifyPluginAsync<NonNullable<client.ClientOptions>>

declare module 'fastify' {
interface FastifyInstance {
'client': Client;
}

interface FastifyRequest {
'client': Client;
}
}

declare namespace Client {
export interface ClientOptions {
url: string
}
export const client: ClientPlugin;
export { client as default };
}

declare function client(...params: Parameters<ClientPlugin>): ReturnType<ClientPlugin>;
export = client;

GraphQL

We provide a partially typed experience for GraphQL, because we do not want to limit +how you are going to query the remote system. Take a look at this example:

declare module 'fastify' {
interface GraphQLQueryOptions {
query: string;
headers: Record<string, string>;
variables: Record<string, unknown>;
}
interface GraphQLClient {
graphql<T>(GraphQLQuery): PromiseLike<T>;
}
interface FastifyInstance {
'client'
: GraphQLClient;

}

interface FastifyRequest {
'client'<T>(GraphQLQuery): PromiseLike<T>;
}
}

declare namespace client {
export interface Clientoptions {
url: string
}
export interface Movie {
'id'?: string;

'title'?: string;

'realeasedDate'?: string;

'createdAt'?: string;

'preferred'?: string;

'quotes'?: Array<Quote>;

}
export interface Quote {
'id'?: string;

'quote'?: string;

'likes'?: number;

'dislikes'?: number;

'movie'?: Movie;

}
export interface MoviesCount {
'total'?: number;

}
export interface QuotesCount {
'total'?: number;

}
export interface MovieDeleted {
'id'?: string;

}
export interface QuoteDeleted {
'id'?: string;

}
export const client: Clientplugin;
export { client as default };
}

declare function client(...params: Parameters<Clientplugin>): ReturnType<Clientplugin>;
export = client;

Since only you can know what GraphQL query you are producing, you are responsible for typing +it accordingly.

Usage with standalone Fastify

If a platformatic configuration file is not found, a complete Fastify plugin is generated to be +used in your Fastify application like so:

const fastify = require('fastify')()
const client = require('./your-client-name')

fastify.register(client, {
url: 'http://example.com'
})

// GraphQL
fastify.post('/', async (request, reply) => {
const res = await request.movies.graphql({
query: 'mutation { saveMovie(input: { title: "foo" }) { id, title } }'
})
return res
})

// OpenAPI
fastify.post('/', async (request, reply) => {
const res = await request.movies.createMovie({ title: 'foo' })
return res
})

fastify.listen({ port: 3000 })

Note that you would need to install @platformatic/client as a dependency.

How are the method names defined in OpenAPI

The names of the operations are defined in the OpenAPI specification. +Specifically, we use the operationId. +If that's not part of the spec, +the name is generated by combining the parts of the path, +like /something/{param1}/ and a method GET, it generates getSomethingParam1.

Authentication

It's very common that downstream services require some form of Authentication. +How could we add the necessary headers? You can configure them from your plugin:

/// <reference path="./myclient" />

/** @type {import('fastify').FastifyPluginAsync<{}>} */
module.exports = async function (app, opts) {
app.configureMyclient({
async getHeaders (req, reply) {
return {
'foo': 'bar'
}
}
})

app.post('/', async (request, reply) => {
const res = await app.myclient.graphql({
query: 'query { movies { title } }'
})
return res
})
}

Telemetry propagation

To correctly propagate telemetry information, be sure to get the client from the request object, e.g.:

fastify.post('/', async (request, reply) => {
const res = await request.movies.createMovie({ title: 'foo' })
return res
})
+ + + + \ No newline at end of file diff --git a/docs/0.41.2/reference/client/programmatic/index.html b/docs/0.41.2/reference/client/programmatic/index.html new file mode 100644 index 00000000000..16d3f9484aa --- /dev/null +++ b/docs/0.41.2/reference/client/programmatic/index.html @@ -0,0 +1,17 @@ + + + + + +Programmatic API | Platformatic Open Source Software + + + + + +
+
Version: 0.41.2

Programmatic API

It is possible to use the Platformatic client without the generator.

OpenAPI Client

import { buildOpenAPIClient } from '@platformatic/client'

const client = await buildOpenAPIClient({
url: `https://yourapi.com/documentation/json`,
// path: 'path/to/openapi.json',
headers: {
'foo': 'bar'
}
})

const res = await client.yourOperationName({ foo: 'bar' })

console.log(res)

If you use Typescript you can take advantage of the generated types file

import { buildOpenAPIClient } from '@platformatic/client'
import Client from './client'
//
// interface Client {
// getMovies(req: GetMoviesRequest): Promise<Array<GetMoviesResponse>>;
// createMovie(req: CreateMovieRequest): Promise<CreateMovieResponse>;
// ...
// }
//

const client: Client = await buildOpenAPIClient<Client>({
url: `https://yourapi.com/documentation/json`,
// path: 'path/to/openapi.json',
headers: {
'foo': 'bar'
}
})

const res = await client.getMovies()
console.log(res)

GraphQL Client

import { buildGraphQLClient } from '@platformatic/client'

const client = await buildGraphQLClient({
url: `https://yourapi.com/graphql`,
headers: {
'foo': 'bar'
}
})

const res = await client.graphql({
query: `
mutation createMovie($title: String!) {
saveMovie(input: {title: $title}) {
id
title
}
}
`,
variables: {
title: 'The Matrix'
}
})

console.log(res)
+ + + + \ No newline at end of file diff --git a/docs/0.41.2/reference/composer/api-modification/index.html b/docs/0.41.2/reference/composer/api-modification/index.html new file mode 100644 index 00000000000..e0b8fd21464 --- /dev/null +++ b/docs/0.41.2/reference/composer/api-modification/index.html @@ -0,0 +1,19 @@ + + + + + +API modification | Platformatic Open Source Software + + + + + +
+
Version: 0.41.2

API modification

If you want to modify automatically generated API, you can use composer custom onRoute hook.

addComposerOnRouteHook(openApiPath, methods, handler)

  • openApiPath (string) - A route OpenAPI path that Platformatic Composer takes from the OpenAPI specification.
  • methods (string[]) - Route HTTP methods that Platformatic Composer takes from the OpenAPI specification.
  • handler (function) - fastify onRoute hook handler.

onComposerResponse

onComposerResponse hook is called after the response is received from a composed service. +It might be useful if you want to modify the response before it is sent to the client. +If you want to use it you need to add onComposerResponse property to the config object of the route options.

  • request (object) - fastify request object.
  • reply (object) - fastify reply object.
  • body (object) - undici response body object.

Example

app.platformatic.addComposerOnRouteHook('/users/{id}', ['GET'], routeOptions => {
routeOptions.schema.response[200] = {
type: 'object',
properties: {
firstName: { type: 'string' },
lastName: { type: 'string' }
}
}

async function onComposerResponse (request, reply, body) {
const payload = await body.json()
const newPayload = {
firstName: payload.first_name,
lastName: payload.last_name
}
reply.send(newPayload)
}
routeOptions.config.onComposerResponse = onComposerResponse
})
+ + + + \ No newline at end of file diff --git a/docs/0.41.2/reference/composer/configuration/index.html b/docs/0.41.2/reference/composer/configuration/index.html new file mode 100644 index 00000000000..8b835d04c9e --- /dev/null +++ b/docs/0.41.2/reference/composer/configuration/index.html @@ -0,0 +1,38 @@ + + + + + +Configuration | Platformatic Open Source Software + + + + + +
+
Version: 0.41.2

Configuration

Platformatic Composer is configured with a configuration file. It supports the use +of environment variables as setting values with configuration placeholders.

Configuration file

If the Platformatic CLI finds a file in the current working directory matching +one of these filenames, it will automatically load it:

  • platformatic.composer.json
  • platformatic.composer.json5
  • platformatic.composer.yml or platformatic.composer.yaml
  • platformatic.composer.tml or platformatic.composer.toml

Alternatively, a --config option with a configuration +filepath can be passed to most platformatic composer CLI commands.

The configuration examples in this reference use JSON.

Supported formats

FormatExtensions
JSON.json
JSON5.json5
YAML.yml, .yaml
TOML.tml

Comments are supported by the JSON5, YAML and TOML file formats.

Settings

Configuration settings are organised into the following groups:

Sensitive configuration settings containing sensitive data should be set using configuration placeholders.

server

A required object with the following settings:

  • hostname (required, string) — Hostname where Platformatic Composer server will listen for connections.

  • port (required, number) — Port where Platformatic Composer server will listen for connections.

  • healthCheck (boolean or object) — Enables the health check endpoint.

    • Powered by @fastify/under-pressure.
    • The value can be an object, used to specify the interval between checks in milliseconds (default: 5000)

    Example

    {
    "server": {
    ...
    "healthCheck": {
    "interval": 2000
    }
    }
    }
  • cors (object) — Configuration for Cross-Origin Resource Sharing (CORS) headers.

    • All options will be passed to the @fastify/cors plugin. In order to specify a RegExp object, you can pass { regexp: 'yourregexp' }, +it will be automatically converted.
  • logger (object) -- the logger configuration.

  • pluginTimeout (integer) -- the number of milliseconds to wait for a Fastify plugin to load, see the fastify docs for more details.

  • https (object) - Configuration for HTTPS supporting the following options.

    • key (required, string, object, or array) - If key is a string, it specifies the private key to be used. If key is an object, it must have a path property specifying the private key file. Multiple keys are supported by passing an array of keys.
    • cert (required, string, object, or array) - If cert is a string, it specifies the certificate to be used. If cert is an object, it must have a path property specifying the certificate file. Multiple certificates are supported by passing an array of keys.

metrics

Configuration for a Prometheus server that will export monitoring metrics +for the current server instance. It uses fastify-metrics +under the hood.

This setting can be a boolean or an object. If set to true the Prometheus server will listen on http://0.0.0.0:9090.

Supported object properties:

  • hostname (string) — The hostname where Prometheus server will listen for connections.
  • port (number) — The port where Prometheus server will listen for connections.
  • auth (object) — Basic Auth configuration. username and password are required here +(use environment variables).

plugins

An optional object that defines the plugins loaded by Platformatic Composer.

  • paths (required, array): an array of paths (string) +or an array of objects composed as follows,

    • path (string): Relative path to plugin's entry point.
    • options (object): Optional plugin options.
    • encapsulate (boolean): if the path is a folder, it instruct Platformatic to not encapsulate those plugins.
    • maxDepth (integer): if the path is a folder, it limits the depth to load the content from.
  • typescript (boolean): enable typescript compilation. A tsconfig.json file is required in the same folder.

    Example

    {
    "plugins": {
    "paths": [{
    "path": "./my-plugin.js",
    "options": {
    "foo": "bar"
    }
    }]
    }
    }

watch

Disable watching for file changes if set to false. It can also be customized with the following options:

  • ignore (string[], default: null): List of glob patterns to ignore when watching for changes. If null or not specified, ignore rule is not applied. Ignore option doesn't work for typescript files.

  • allow (string[], default: ['*.js', '**/*.js']): List of glob patterns to allow when watching for changes. If null or not specified, allow rule is not applied. Allow option doesn't work for typescript files.

    Example

    {
    "watch": {
    "ignore": ["*.mjs", "**/*.mjs"],
    "allow": ["my-plugin.js", "plugins/*.js"]
    }
    }

composer

Configure @platformatic/composer specific settings such as services or refreshTimeout:

  • services (array, default: []) — is an array of objects that defines +the services managed by the composer. Each service object supports the following settings:

    • id (required, string) - A unique identifier for the service.
    • origin (string) - A service origin. Skip this option if the service is executing inside of Platformatic Runtime. In this case, service id will be used instead of origin.
    • openapi (required, object) - The configuration file used to compose OpenAPI specification. See the openapi for details.
    • proxy (object or false) - Service proxy configuration. If false, the service proxy is disabled.
      • prefix (required, string) - Service proxy prefix. All service routes will be prefixed with this value.
    • refreshTimeout (number) - The number of milliseconds to wait for check for changes in the service OpenAPI specification. If not specified, the default value is 1000.

openapi

  • url (string) - A path of the route that exposes the OpenAPI specification. If a service is a Platformatic Service or Platformatic DB, use /documentation/json as a value. Use this or file option to specify the OpenAPI specification.
  • file (string) - A path to the OpenAPI specification file. Use this or url option to specify the OpenAPI specification.
  • prefix (string) - A prefix for the OpenAPI specification. All service routes will be prefixed with this value.
  • config (string) - A path to the OpenAPI configuration file. This file is used to customize the OpenAPI specification. See the openapi-configuration for details.
openapi-configuration

The OpenAPI configuration file is a JSON file that is used to customize the OpenAPI specification. It supports the following options:

  • ignore (boolean) - If true, the route will be ignored by the composer. +If you want to ignore a specific method, use the ignore option in the nested method object.

    Example

    {
    "paths": {
    "/users": {
    "ignore": true
    },
    "/users/{id}": {
    "get": { "ignore": true },
    "put": { "ignore": true }
    }
    }
    }
  • alias (string) - Use it to create an alias for the route path. Original route path will be ignored.

    Example

    {
    "paths": {
    "/users": {
    "alias": "/customers"
    }
    }
    }
  • rename (string) - Use it to rename composed route response fields. +Use json schema format to describe the response structure. For now it works only for 200 response.

    Example

    {
    "paths": {
    "/users": {
    "responses": {
    "200": {
    "type": "array",
    "items": {
    "type": "object",
    "properties": {
    "id": { "rename": "user_id" },
    "name": { "rename": "first_name" }
    }
    }
    }
    }
    }
    }
    }

Examples

Composition of two remote services:

{
"composer": {
"services": [
{
"id": "auth-service",
"origin": "https://auth-service.com",
"openapi": {
"url": "/documentation/json",
"prefix": "auth"
}
},
{
"id": "payment-service",
"origin": "https://payment-service.com",
"openapi": {
"file": "./schemas/payment-service.json"
}
}
],
"refreshTimeout": 1000
}
}

Composition of two local services inside of Platformatic Runtime:

{
"composer": {
"services": [
{
"id": "auth-service",
"openapi": {
"url": "/documentation/json",
"prefix": "auth"
}
},
{
"id": "payment-service",
"openapi": {
"file": "./schemas/payment-service.json"
}
}
],
"refreshTimeout": 1000
}
}

telemetry

Open Telemetry is optionally supported with these settings:

  • serviceName (required, string) — Name of the service as will be reported in open telemetry.
  • version (string) — Optional version (free form)
  • skip (array). Optional list of operations to skip when exporting telemetry defined object with properties:
    • method: GET, POST, PUT, DELETE, PATCH, HEAD, OPTIONS, TRACE
    • path. e.g.: /documentation/json
  • exporter (object or array) — Exporter configuration. If not defined, the exporter defaults to console. If an array of objects is configured, every object must be a valid exporter object. The exporter object has the following properties:
    • type (string) — Exporter type. Supported values are console, otlp, zipkin and memory (default: console). memory is only supported for testing purposes.
    • options (object) — These options are supported:
      • url (string) — The URL to send the telemetry to. Required for otlp exporter. This has no effect on console and memory exporters.
      • headers (object) — Optional headers to send with the telemetry. This has no effect on console and memory exporters.

Note that OTLP traces can be consumed by different solutions, like Jaeger. Here the full list.

Example

{
"telemetry": {
"serviceName": "test-service",
"exporter": {
"type": "otlp",
"options": {
"url": "http://localhost:4318/v1/traces"
}
}
}
}

Environment variable placeholders

The value for any configuration setting can be replaced with an environment variable +by adding a placeholder in the configuration file, for example {PLT_SERVER_LOGGER_LEVEL}.

All placeholders in a configuration must be available as an environment variable +and must meet the allowed placeholder name rules.

Example

platformatic.service.json
{
"server": {
"port": "{PORT}"
}
}

Platformatic will replace the placeholders in this example with the environment +variables of the same name.

Setting environment variables

If a .env file exists it will automatically be loaded by Platformatic using +dotenv. For example:

.env
PLT_SERVER_LOGGER_LEVEL=info
PORT=8080

The .env file must be located in the same folder as the Platformatic configuration +file or in the current working directory.

Environment variables can also be set directly on the command line, for example:

PLT_SERVER_LOGGER_LEVEL=debug npx platformatic composer

Allowed placeholder names

Only placeholder names prefixed with PLT_, or that are in this allow list, will be +dynamically replaced in the configuration file:

  • PORT

This restriction is to avoid accidentally exposing system environment variables. +An error will be raised by Platformatic if it finds a configuration placeholder +that isn't allowed.

The default allow list can be extended by passing a --allow-env CLI option with a +comma separated list of strings, for example:

npx platformatic composer --allow-env=HOST,SERVER_LOGGER_LEVEL

If --allow-env is passed as an option to the CLI, it will be merged with the +default allow list.

+ + + + \ No newline at end of file diff --git a/docs/0.41.2/reference/composer/introduction/index.html b/docs/0.41.2/reference/composer/introduction/index.html new file mode 100644 index 00000000000..c471c186918 --- /dev/null +++ b/docs/0.41.2/reference/composer/introduction/index.html @@ -0,0 +1,22 @@ + + + + + +Platformatic Composer | Platformatic Open Source Software + + + + + +
+
Version: 0.41.2

Platformatic Composer

Platformatic Composer is an HTTP server that automatically aggregates multiple +services APIs into a single API.

info

Platformatic Composer is currently in public beta.

Features

Public beta

Platformatic Composer is in public beta. You can use it in production, but it's quite +likely that you'll encounter significant bugs.

If you run into a bug or have a suggestion for improvement, please +raise an issue on GitHub.

Standalone usage

If you're only interested in the features available in Platformatic Composer, you can replace platformatic with @platformatic/composer in the dependencies of your package.json, so that you'll import fewer deps.

Example configuration file

The following configuration file can be used to start a new Platformatic +Composer project. For more details on the configuration file, see the +configuration documentation.

{
"$schema": "https://platformatic.dev/schemas/v0.26.0/composer",
"server": {
"hostname": "127.0.0.1",
"port": 0,
"logger": {
"level": "info"
}
},
"composer": {
"services": [
{
"id": "auth-service",
"origin": "https://auth-service.com",
"openapi": {
"url": "/documentation/json",
"prefix": "auth"
}
},
{
"id": "payment-service",
"origin": "https://payment-service.com",
"openapi": {
"url": "/documentation/json"
}
}
],
"refreshTimeout": 1000
},
"watch": true
}
+ + + + \ No newline at end of file diff --git a/docs/0.41.2/reference/composer/plugin/index.html b/docs/0.41.2/reference/composer/plugin/index.html new file mode 100644 index 00000000000..0023152bd69 --- /dev/null +++ b/docs/0.41.2/reference/composer/plugin/index.html @@ -0,0 +1,18 @@ + + + + + +Plugin | Platformatic Open Source Software + + + + + +
+
Version: 0.41.2

Plugin

If you want to add features to a service, you will need to register a plugin, which will be in the form of a standard Fastify plugin.

The config file will specify where the plugin file is located as the example below:

{
...
"plugins": {
"paths": ["./plugin/index.js"]
}
}

The path is relative to the config file path.

You should export an async function which receives the following parameters

  • app (FastifyInstance) that is the main fastify instance
  • opts all the options specified in the config file after path

Hot Reload

Plugin file is being watched by fs.watch function.

You don't need to reload Platformatic Composer server while working on your plugin. Every time you save, the watcher will trigger a reload event and the server will auto-restart and load your updated code.

tip

At this time, on Linux, file watch in subdirectories is not supported due to a Node.js limitation (documented here).

Directories

The path can also be a directory. In that case, the directory will be loaded with @fastify/autoload.

Consider the following directory structure:

├── routes
│ ├── foo
│ │ ├── something.js
│ │ └── bar
│ │ └── baz.js
│ ├── single-plugin
│ │ └── utils.js
│ └── another-plugin.js
└── platformatic.composer.json

By default the folder will be added as a prefix to all the routes defined within them. +See the autoload documentation for all the options to customize this behavior.

Multiple plugins

Multiple plugins can be loaded in parallel by specifying an array:

{
...
"plugins": {
"paths": [{
"path": "./plugin/index.js"
}, {
"path": "./routes/"
}]
}
}
+ + + + \ No newline at end of file diff --git a/docs/0.41.2/reference/composer/programmatic/index.html b/docs/0.41.2/reference/composer/programmatic/index.html new file mode 100644 index 00000000000..09a61dcdc3d --- /dev/null +++ b/docs/0.41.2/reference/composer/programmatic/index.html @@ -0,0 +1,18 @@ + + + + + +Programmatic API | Platformatic Open Source Software + + + + + +
+
Version: 0.41.2

Programmatic API

In many cases it's useful to start Platformatic Composer using an API instead of +command line, e.g. in tests we want to start and stop our server.

The buildServer function allows that:

import { buildServer } from '@platformatic/composer'

const app = await buildServer('path/to/platformatic.composer.json')
await app.start()

const res = await fetch(app.url)
console.log(await res.json())

// do something

await app.close()

It is also possible to customize the configuration:

import { buildServer } from '@platformatic/composer'

const app = await buildServer({
server: {
hostname: '127.0.0.1',
port: 0
},
services: [
{
id: 'auth-service',
origin: 'https://auth-service.com',
openapi: {
url: '/documentation/json',
prefix: 'auth'
}
},
{
id: 'payment-service',
origin: 'https://payment-service.com',
openapi: {
file: './schemas/payment-service.json'
}
}
]
})

await app.start()

const res = await fetch(app.url)
console.log(await res.json())

// do something

await app.close()
+ + + + \ No newline at end of file diff --git a/docs/0.41.2/reference/db/authorization/introduction/index.html b/docs/0.41.2/reference/db/authorization/introduction/index.html new file mode 100644 index 00000000000..2a3fef8cfd4 --- /dev/null +++ b/docs/0.41.2/reference/db/authorization/introduction/index.html @@ -0,0 +1,21 @@ + + + + + +Authorization | Platformatic Open Source Software + + + + + +
+
Version: 0.41.2

Authorization

Introduction

Authorization in Platformatic DB is role-based. User authentication and the +assignment of roles must be handled by an external authentication service.

Configuration

Authorization strategies and rules are configured via a Platformatic DB +configuration file. See the Platformatic DB Configuration +documentation for the supported settings.

Bypass authorization in development

To make testing and developing easier, it's possible to bypass authorization checks +if an adminSecret is set. See the HTTP headers (development only) documentation.

+ + + + \ No newline at end of file diff --git a/docs/0.41.2/reference/db/authorization/rules/index.html b/docs/0.41.2/reference/db/authorization/rules/index.html new file mode 100644 index 00000000000..a9cb4c00ba7 --- /dev/null +++ b/docs/0.41.2/reference/db/authorization/rules/index.html @@ -0,0 +1,28 @@ + + + + + +Rules | Platformatic Open Source Software + + + + + +
+
Version: 0.41.2

Rules

Introduction

Authorization rules can be defined to control what operations users are +able to execute via the REST or GraphQL APIs that are exposed by a Platformatic +DB app.

Every rule must specify:

  • role (required) — A role name. It's a string and must match with the role(s) set by an external authentication service.
  • entity (optional) — The Platformatic DB entity to apply this rule to.
  • entities (optional) — The Platformatic DB entities to apply this rule to.
  • defaults (optional) — Configure entity fields that will be +automatically set from user data.
  • One entry for each supported CRUD operation: find, save, delete

One of entity and entities must be specified.

Operation checks

Every entity operation — such as find, insert, save or delete — can have +authorization checks specified for them. This value can be false (operation disabled) +or true (operation enabled with no checks).

To specify more fine-grained authorization controls, add a checks field, e.g.:

{
"role": "user",
"entity": "page",
"find": {
"checks": {
"userId": "X-PLATFORMATIC-USER-ID"
}
},
...
}

In this example, when a user with a user role executes a findPage, they can +access all the data that has userId equal to the value in user metadata with +key X-PLATFORMATIC-USER-ID.

Note that "userId": "X-PLATFORMATIC-USER-ID" is syntactic sugar for:

      "find": {
"checks": {
"userId": {
"eq": "X-PLATFORMATIC-USER-ID"
}
}
}

It's possible to specify more complex rules using all the supported where clause operators.

Note that userId MUST exist as a field in the database table to use this feature.

GraphQL events and subscriptions

Platformatic DB supports GraphQL subscriptions and therefore db-authorization must protect them. +The check is performed based on the find permissions, the only permissions that are supported are:

  1. find: false, the subscription for that role is disabled
  2. find: { checks: { [prop]: 'X-PLATFORMATIC-PROP' } } validates that the given prop is equal
  3. find: { checks: { [prop]: { eq: 'X-PLATFORMATIC-PROP' } } } validates that the given prop is equal

Conflicting rules across roles for different equality checks will not be supported.

Restrict access to entity fields

If a fields array is present on an operation, Platformatic DB restricts the columns on which the user can execute to that list. +For save operations, the configuration must specify all the not-nullable fields (otherwise, it would fail at runtime). +Platformatic does these checks at startup.

Example:

    "rule": {
"entity": "page",
"role": "user",
"find": {
"checks": {
"userId": "X-PLATFORMATIC-USER-ID"
},
"fields": ["id", "title"]
}
...
}

In this case, only id and title are returned for a user with a user role on the page entity.

Set entity fields from user metadata

Defaults are used during database inserts; they are fields that are automatically populated from user metadata, e.g.:

        "defaults": {
"userId": "X-PLATFORMATIC-USER-ID"
},

When an entity is created, the userId column is used and populated using the value from user metadata.

Programmatic rules

If it's necessary to have more control over the authorizations, it's possible to specify the rules programmatically, e.g.:


app.register(auth, {
jwt: {
secret: 'supersecret'
},
rules: [{
role: 'user',
entity: 'page',
async find ({ user, ctx, where }) {
return {
...where,
userId: {
eq: user['X-PLATFORMATIC-USER-ID']
}
}
},
async delete ({ user, ctx, where }) {
return {
...where,
userId: {
eq: user['X-PLATFORMATIC-USER-ID']
}
}
},
defaults: {
userId: async function ({ user, ctx, input }) {
match(user, {
'X-PLATFORMATIC-USER-ID': generated.shift(),
'X-PLATFORMATIC-ROLE': 'user'
})
return user['X-PLATFORMATIC-USER-ID']
}

},
async save ({ user, ctx, where }) {
return {
...where,
userId: {
eq: user['X-PLATFORMATIC-USER-ID']
}
}
}
}]
})

In this example, the user role can delete all the posts edited before yesterday:

 app.register(auth, {
jwt: {
secret: 'supersecret'
},
roleKey: 'X-PLATFORMATIC-ROLE',
anonymousRole: 'anonymous',
rules: [{
role: 'user',
entity: 'page',
find: true,
save: true,
async delete ({ user, ctx, where }) {
return {
...where,
editedAt: {
lt: yesterday
}
}
},
defaults: {
userId: 'X-PLATFORMATIC-USER-ID'
}
}]
})

Access validation on entity mapper for plugins

To assert that a specific user with its role(s) has the correct access rights to use entities in a Platformatic plugin, the context should be passed to the entity mapper in order to verify its permissions, like this:

//plugin.js

app.post('/', async (req, reply) => {
const ctx = req.createPlatformaticCtx()

await app.platformatic.entities.movie.find({
where: { /*...*/ },
ctx
})
})

Skip authorization rules

In custom plugins, it's possible to skip the authorization rules on entities programmatically by setting the skipAuth flag to true or not passing a ctx, e.g.:

// this works even if the user's role doesn't have the `find` permission.
const result = await app.platformatic.entities.page.find({skipAuth: true, ...})

This has the same effect:

// this works even if the user's role doesn't have the `find` permission
const result = await app.platformatic.entities.page.find() // no `ctx`

This is useful for custom plugins for which the authentication is not necessary, so there is no user role set when invoked.

info

Skip authorization rules is not possible on the automatically generated REST and GraphQL APIs.

Avoid repetition of the same rule multiple times

Very often we end up writing the same rules over and over again. +Instead, it's possible to condense the rule for multiple entities on a single entry:

 app.register(auth, {
jwt: {
secret: 'supersecret'
},
roleKey: 'X-PLATFORMATIC-ROLE',
anonymousRole: 'anonymous',
rules: [{
role: 'anonymous',
entities: ['category', 'page'],
find: true,
delete: false,
save: false
}]
})
+ + + + \ No newline at end of file diff --git a/docs/0.41.2/reference/db/authorization/strategies/index.html b/docs/0.41.2/reference/db/authorization/strategies/index.html new file mode 100644 index 00000000000..2a3d300dba5 --- /dev/null +++ b/docs/0.41.2/reference/db/authorization/strategies/index.html @@ -0,0 +1,40 @@ + + + + + +Strategies | Platformatic Open Source Software + + + + + +
+
Version: 0.41.2

Strategies

Introduction

Platformatic DB supports the following authorization strategies:

JSON Web Token (JWT)

The JSON Web Token (JWT) authorization strategy is built on top +of the @fastify/jwt Fastify plugin.

Platformatic DB JWT integration

To configure it, the quickest way is to pass a shared secret in your +Platformatic DB configuration file, for example:

platformatic.db.json
{
"authorization": {
"jwt": {
"secret": "<shared-secret>"
}
}
}

By default @fastify/jwt looks for a JWT in an HTTP request's Authorization +header. This requires HTTP requests to the Platformatic DB API to include an +Authorization header like this:

Authorization: Bearer <token>

See the @fastify/jwt documentation +for all of the available configuration options.

JSON Web Key Sets (JWKS)

The JWT authorization strategy includes support for JSON Web Key Sets.

To configure it:

platformatic.db.json
{
"authorization": {
"jwt": {
"jwks": {
"allowedDomains": [
"https://ISSUER_DOMAIN"
]
}
}
}
}

When a JSON Web Token is included in a request to Platformatic DB, it retrieves the correct public key from https://ISSUER_DOMAIN/.well-known/jwks.json and uses it to verify the JWT signature. The token carries all the information, like the kid, which is the key id used to sign the token itself, so no other configuration is required.

JWKS can be enabled without any options:

platformatic.db.json
{
"authorization": {
"jwt": {
"jwks": true
}
}
}

When configured like this, the JWK URL is calculated from the iss (issuer) field of JWT, so +every JWT token from an issuer that exposes a valid JWKS token will pass the validation. +This configuration should only be used in development, while +in every other case the allowedDomains option should be specified.

Any option supported by the get-jwks +library can be specified in the authorization.jwt.jwks object.

JWT Custom Claim Namespace

JWT claims can be namespaced to avoid name collisions. If so, we will receive tokens with custom claims such as: https://platformatic.dev/X-PLATFORMATIC-ROLE (where https://platformatic.dev/ is the namespace). If we want to map these claims to user metadata removing our namespace, we can specify the namespace in the JWT options:

platformatic.db.json
{
"authorization": {
"jwt": {
"namespace": "https://platformatic.dev/"
}
}
}

With this configuration, the https://platformatic.dev/X-PLATFORMATIC-ROLE claim +is mapped to X-PLATFORMATIC-ROLE user metadata.

Webhook

Platformatic DB can use a webhook to authenticate requests.

Platformatic DB Webhook integration

In this case, the URL is configured on authorization:

platformatic.db.json
{
"authorization": {
"webhook": {
"url": "<webhook url>"
}
}
}

When a request is received, Platformatic sends a POST to the webhook, replicating +the same body and headers, except for:

  • host
  • connection

In the Webhook case, the HTTP response contains the roles/user information as HTTP headers.

HTTP headers (development only)

danger

Passing an admin API key via HTTP headers is highly insecure and should only be used +during development or within protected networks.

If a request has X-PLATFORMATIC-ADMIN-SECRET HTTP header set with a valid adminSecret +(see configuration reference) the +role is set automatically as platformatic-admin, unless a different role is set for +user impersonation (which is disabled if JWT or Webhook are set, see below).

Platformatic DB HTTP Headers

Also, the following rule is automatically added to every entity, allowing the user +that presented the adminSecret to perform any operation on any entity:

{
"role": "platformatic-admin",
"find": false,
"delete": false,
"save": false
}
+ + + + \ No newline at end of file diff --git a/docs/0.41.2/reference/db/authorization/user-roles-metadata/index.html b/docs/0.41.2/reference/db/authorization/user-roles-metadata/index.html new file mode 100644 index 00000000000..81c33146fa0 --- /dev/null +++ b/docs/0.41.2/reference/db/authorization/user-roles-metadata/index.html @@ -0,0 +1,31 @@ + + + + + +User Roles & Metadata | Platformatic Open Source Software + + + + + +
+
Version: 0.41.2

User Roles & Metadata

Introduction

Roles and user information are passed to Platformatic DB from an external +authentication service as a string (JWT claims or HTTP headers). We refer to +this data as user metadata.

Roles

Users can have a list of roles associated with them. These roles can be specified +in an X-PLATFORMATIC-ROLE property as a list of comma separated role names +(the key name is configurable).

Note that role names are just strings.

Reserved roles

Some special role names are reserved by Platformatic DB:

  • platformatic-admin : this identifies a user who has admin powers
  • anonymous: set automatically when no roles are associated

Anonymous role

If a user has no role, the anonymous role is assigned automatically. It's possible +to specify rules to apply to users with this role:

    {
"role": "anonymous",
"entity": "page",
"find": false,
"delete": false,
"save": false
}

In this case, a user that has no role or explicitly has the anonymous role +cannot perform any operations on the page entity.

Role impersonation

If a request includes a valid X-PLATFORMATIC-ADMIN-SECRET HTTP header it is +possible to impersonate a user roles. The roles to impersonate can be specified +by sending a X-PLATFORMATIC-ROLE HTTP header containing a comma separated list +of roles.

note

When JWT or Webhook are set, user role impersonation is not enabled, and the role is always set as platformatic-admin automatically if the X-PLATFORMATIC-ADMIN-SECRET HTTP header is specified.

Role configuration

The roles key in user metadata defaults to X-PLATFORMATIC-ROLE. It's possible to change it using the roleKey field in configuration. Same for the anonymous role, which value can be changed using anonymousRole.

 "authorization": {
"roleKey": "X-MYCUSTOM-ROLE_KEY",
"anonymousRole": "anonym",
"rules": [
...
]
}

User metadata

User roles and other user data, such as userId, are referred to by Platformatic +DB as user metadata.

User metadata is parsed from an HTTP request and stored in a user object on the Fastify request object. This object is populated on-demand, but it's possible to populate it explicitly with await request.setupDBAuthorizationUser().

+ + + + \ No newline at end of file diff --git a/docs/0.41.2/reference/db/configuration/index.html b/docs/0.41.2/reference/db/configuration/index.html new file mode 100644 index 00000000000..3fde3c4b33c --- /dev/null +++ b/docs/0.41.2/reference/db/configuration/index.html @@ -0,0 +1,59 @@ + + + + + +Configuration | Platformatic Open Source Software + + + + + +
+
Version: 0.41.2

Configuration

Platformatic DB is configured with a configuration file. It supports the use +of environment variables as setting values with configuration placeholders.

Configuration file

If the Platformatic CLI finds a file in the current working directory matching +one of these filenames, it will automatically load it:

  • platformatic.db.json
  • platformatic.db.json5
  • platformatic.db.yml or platformatic.db.yaml
  • platformatic.db.tml or platformatic.db.toml

Alternatively, a --config option with a configuration +filepath can be passed to most platformatic db CLI commands.

The configuration examples in this reference use JSON.

Supported formats

FormatExtensions
JSON.json
JSON5.json5
YAML.yml, .yaml
TOML.tml, .toml

Comments are supported by the JSON5, YAML and TOML file formats.

Settings

Configuration settings are organised into the following groups:

Sensitive configuration settings, such as a database connection URL that contains +a password, should be set using configuration placeholders.

db

A required object with the following settings:

  • connectionString (required, string) — Database connection URL.

    • Example: postgres://user:password@my-database:5432/db-name
  • schema (array of string) - Currently supported only for postgres; the schemas used to look for entities. If not provided, the default public schema is used.

    Examples

  "db": {
"connectionString": "(...)",
"schema": [
"schema1", "schema2"
],
...

},

  • Platformatic DB supports MySQL, MariaDB, PostgreSQL and SQLite.

  • graphql (boolean or object, default: true) — Controls the GraphQL API interface, with optional GraphiQL UI.

    Examples

    Enables GraphQL support

    {
    "db": {
    ...
    "graphql": true
    }
    }

    Enables GraphQL support with GraphiQL

    {
    "db": {
    ...
    "graphql": {
    "graphiql": true
    }
    }
    }

    It's possible to selectively ignore entities:

    {
    "db": {
    ...
    "graphql": {
    "ignore": {
    "categories": true
    }
    }
    }
    }

    It's possible to selectively ignore fields:

    {
    "db": {
    ...
    "graphql": {
    "ignore": {
    "categories": {
    "name": true
    }
    }
    }
    }
    }

    It's possible to add a custom GraphQL schema during the startup:

    {
    "db": {
    ...
    "graphql": {
    "schemaPath": "path/to/schema.graphql"
    }
    }
    }
    }
  • openapi (boolean or object, default: true) — Enables OpenAPI REST support.

    • If value is an object, all OpenAPI v3 allowed properties can be passed. Also a prefix property can be passed to set the OpenAPI prefix.
    • Platformatic DB uses @fastify/swagger under the hood to manage this configuration.

    Examples

    Enables OpenAPI

    {
    "db": {
    ...
    "openapi": true
    }
    }

    Enables OpenAPI with prefix

    {
    "db": {
    ...
    "openapi": {
    "prefix": "/api"
    }
    }
    }

    Enables OpenAPI with options

    {
    "db": {
    ...
    "openapi": {
    "info": {
    "title": "Platformatic DB",
    "description": "Exposing a SQL database as REST"
    }
    }
    }
    }

    You can for example add the security section, so that Swagger will allow you to add the authentication header to your requests. +In the following code snippet, we're adding a Bearer token in the form of a JWT:

    {
    "db": {
    ...
    "openapi": {
    ...
    "security": [{ "bearerAuth": [] }],
    "components": {
    "securitySchemes": {
    "bearerAuth": {
    "type": "http",
    "scheme": "bearer",
    "bearerFormat": "JWT"
    }
    }
    }
    }
    }
    }

    It's possible to selectively ignore entities:

    {
    "db": {
    ...
    "openapi": {
    "ignore": {
    "categories": true
    }
    }
    }
    }

    It's possible to selectively ignore fields:

    {
    "db": {
    ...
    "openapi": {
    "ignore": {
    "categories": {
    "name": true
    }
    }
    }
    }
    }
  • ignore (object) — Key/value object that defines which database tables should not be mapped as API entities.

    Examples

    {
    "db": {
    ...
    "ignore": {
    "versions": true // "versions" table will be not mapped with GraphQL/REST APIs
    }
    }
    }
  • events (boolean or object, default: true) — Controls the support for events published by the SQL mapping layer. +If enabled, this option add support for GraphQL Subscription over WebSocket. By default it uses an in-process message broker. +It's possible to configure it to use Redis instead.

    Examples

    {
    "db": {
    ...
    "events": {
    "connectionString": "redis://:password@redishost.com:6380/"
    }
    }
    }
  • schemalock (boolean or object, default: false) — Controls the caching of the database schema on disk. +If set to true the database schema metadata is stored inside a schema.lock file. +It's also possible to configure the location of that file by specifying a path, like so:

    Examples

    {
    "db": {
    ...
    "schemalock": {
    "path": "./dbmetadata"
    }
    }
    }

    Starting Platformatic DB or running a migration will automatically create the schemalock file.

metrics

Configuration for a Prometheus server that will export monitoring metrics +for the current server instance. It uses fastify-metrics +under the hood.

This setting can be a boolean or an object. If set to true the Prometheus server will listen on http://0.0.0.0:9090.

Supported object properties:

  • hostname (string) — The hostname where Prometheus server will listen for connections.
  • port (number) — The port where Prometheus server will listen for connections.
  • auth (object) — Basic Auth configuration. username and password are required here +(use environment variables).

migrations

Configures Postgrator to run migrations against the database.

An optional object with the following settings:

  • dir (required, string): Relative path to the migrations directory.
  • autoApply (boolean, default: false): Automatically apply migrations when Platformatic DB server starts.

plugins

An optional object that defines the plugins loaded by Platformatic DB.

  • paths (required, array): an array of paths (string) +or an array of objects composed as follows,
    • path (string): Relative path to plugin's entry point.
    • options (object): Optional plugin options.
    • encapsulate (boolean): if the path is a folder, it instructs Platformatic not to encapsulate those plugins, allowing decorators and hooks to be shared across all routes.
    • maxDepth (integer): if the path is a folder, it limits the depth to load the content from.
  • typescript (boolean or object): enable TypeScript compilation. A tsconfig.json file is required in the same folder.
{
"plugins": {
"paths": [{
"path": "./my-plugin.js",
"options": {
"foo": "bar"
}
}]
}
}

typescript compilation options

The typescript option can also be an object to customize the compilation. Here are the supported options:

  • enabled (boolean): enables compilation
  • tsConfig (string): path to the tsconfig.json file relative to the configuration
  • outDir (string): the output directory of tsconfig.json, in case tsconfig.json is not available and enabled is set to false (production build)
  • flags (array of string): flags to be passed to tsc. Overrides tsConfig. +

Example:

{
"plugins": {
"paths": [{
"path": "./my-plugin.js",
"options": {
"foo": "bar"
}
}],
"typescript": {
"enabled": false,
"tsConfig": "./path/to/tsconfig.json",
"outDir": "dist"
}
}
}

watch

Disable watching for file changes if set to false. It can also be customized with the following options:

  • ignore (string[], default: null): List of glob patterns to ignore when watching for changes. If null or not specified, ignore rule is not applied. Ignore option doesn't work for typescript files.

  • allow (string[], default: ['*.js', '**/*.js']): List of glob patterns to allow when watching for changes. If null or not specified, allow rule is not applied. Allow option doesn't work for typescript files.

    Example

    {
    "watch": {
    "ignore": ["*.mjs", "**/*.mjs"],
    "allow": ["my-plugin.js", "plugins/*.js"]
    }
    }

server

A required object with the following settings:

  • hostname (required, string) — Hostname where Platformatic DB server will listen for connections.

  • port (required, number) — Port where Platformatic DB server will listen for connections.

  • healthCheck (boolean or object) — Enables the health check endpoint.

    • Powered by @fastify/under-pressure.
    • The value can be an object, used to specify the interval between checks in milliseconds (default: 5000)

    Example

    {
    "server": {
    ...
    "healthCheck": {
    "interval": 2000
    }
    }
    }
  • cors (object) — Configuration for Cross-Origin Resource Sharing (CORS) headers.

    • All options will be passed to the @fastify/cors plugin. In order to specify a RegExp object, you can pass { regexp: 'yourregexp' }, +it will be automatically converted
  • https (object) - Configuration for HTTPS supporting the following options.

    • key (required, string, object, or array) - If key is a string, it specifies the private key to be used. If key is an object, it must have a path property specifying the private key file. Multiple keys are supported by passing an array of keys.
    • cert (required, string, object, or array) - If cert is a string, it specifies the certificate to be used. If cert is an object, it must have a path property specifying the certificate file. Multiple certificates are supported by passing an array of keys.
  • logger (object) -- the logger configuration.

  • pluginTimeout (integer) -- the number of milliseconds to wait for a Fastify plugin to load

  • bodyLimit (integer) -- the maximum request body size in bytes

  • maxParamLength (integer) -- the maximum length of a request parameter

  • caseSensitive (boolean) -- if true, the router will be case sensitive

  • ignoreTrailingSlash (boolean) -- if true, the router will ignore the trailing slash

  • ignoreTrailingSlash (boolean) -- if true, the router will ignore the trailing slash

  • connectionTimeout (integer) -- the milliseconds to wait for a new HTTP request

  • keepAliveTimeout (integer) -- the milliseconds to wait for a keep-alive HTTP request

  • maxRequestsPerSocket (integer) -- the maximum number of requests per socket

  • forceCloseConnections (boolean or "idle") -- if true, the server will close all connections when it is closed

  • requestTimeout (integer) -- the milliseconds to wait for a request to be completed

  • disableRequestLogging (boolean) -- if true, the request logger will be disabled

  • exposeHeadRoutes (boolean) -- if true, the router will expose HEAD routes

  • serializerOpts (object) -- the serializer options

  • requestIdHeader (string or false) -- the name of the header that will contain the request id

  • requestIdLogLabel (string) -- Defines the label used for the request identifier when logging the request. default: 'reqId'

  • jsonShorthand (boolean) -- default: true -- visit fastify docs for more details

  • trustProxy (boolean or integer or string or String[]) -- default: false -- visit fastify docs for more details

tip

See the fastify docs for more details.

authorization

An optional object with the following settings:

  • adminSecret (string): A secret that should be sent in an x-platformatic-admin-secret HTTP header when performing GraphQL/REST API calls. Use an environment variable placeholder to securely provide the value for this setting.
  • roleKey (string, default: X-PLATFORMATIC-ROLE): The name of the key in user metadata that is used to store the user's roles. See Role configuration.
  • anonymousRole (string, default: anonymous): The name of the anonymous role. See Role configuration.
  • jwt (object): Configuration for the JWT authorization strategy. Any option accepted by @fastify/jwt can be passed in this object.
  • webhook (object): Configuration for the Webhook authorization strategy.
    • url (required, string): Webhook URL that Platformatic DB will make a POST request to.
  • rules (array): Authorization rules that describe the CRUD actions that users are allowed to perform against entities. See Rules documentation.
note

If an authorization object is present, but no rules are specified, no CRUD operations are allowed unless adminSecret is passed.

Example

platformatic.db.json
{
"authorization": {
"jwt": {
"secret": "{PLT_AUTHORIZATION_JWT_SECRET}"
},
"rules": [
...
]
}
}

telemetry

Open Telemetry is optionally supported with these settings:

  • serviceName (required, string) — Name of the service as will be reported in open telemetry.
  • version (string) — Optional version (free form)
  • skip (array). Optional list of operations to skip when exporting telemetry defined object with properties:
    • method: GET, POST, PUT, DELETE, PATCH, HEAD, OPTIONS, TRACE
    • path. e.g.: /documentation/json
  • exporter (object or array) — Exporter configuration. If not defined, the exporter defaults to console. If an array of objects is configured, every object must be a valid exporter object. The exporter object has the following properties:
    • type (string) — Exporter type. Supported values are console, otlp, zipkin and memory (default: console). memory is only supported for testing purposes.
    • options (object) — These options are supported:
      • url (string) — The URL to send the telemetry to. Required for otlp exporter. This has no effect on console and memory exporters.
      • headers (object) — Optional headers to send with the telemetry. This has no effect on console and memory exporters.

Note that OTLP traces can be consumed by different solutions, like Jaeger. Here the full list.

Example

{
"telemetry": {
"serviceName": "test-service",
"exporter": {
"type": "otlp",
"options": {
"url": "http://localhost:4318/v1/traces"
}
}
}
}

Environment variable placeholders

The value for any configuration setting can be replaced with an environment variable by adding a placeholder in the configuration file, for example {PLT_SERVER_LOGGER_LEVEL}.

All placeholders in a configuration must be available as an environment variable and must meet the allowed placeholder name rules.

Example

platformatic.db.json
{
"db": {
"connectionString": "{DATABASE_URL}"
},
"server": {
"logger": {
"level": "{PLT_SERVER_LOGGER_LEVEL}"
},
"port": "{PORT}"
}
}

Platformatic will replace the placeholders in this example with the environment variables of the same name.

Setting environment variables

If a .env file exists it will automatically be loaded by Platformatic using dotenv. For example:

.env
PLT_SERVER_LOGGER_LEVEL=info
PORT=8080

The .env file must be located in the same folder as the Platformatic configuration file or in the current working directory.

Environment variables can also be set directly on the command line, for example:

PLT_SERVER_LOGGER_LEVEL=debug npx platformatic db

Allowed placeholder names

Only placeholder names prefixed with PLT_, or that are in this allow list, will be dynamically replaced in the configuration file:

  • PORT
  • DATABASE_URL

This restriction is to avoid accidentally exposing system environment variables. An error will be raised by Platformatic if it finds a configuration placeholder that isn't allowed.

The default allow list can be extended by passing a --allow-env CLI option with a comma separated list of strings, for example:

npx platformatic db start --allow-env=HOST,SERVER_LOGGER_LEVEL
# OR
npx platformatic start --allow-env=HOST,SERVER_LOGGER_LEVEL

If --allow-env is passed as an option to the CLI, it will be merged with the default allow list.

Sample Configuration

This is a bare minimum configuration for Platformatic DB. Uses a local ./db.sqlite SQLite database, with OpenAPI and GraphQL support.

Server will listen to http://127.0.0.1:3042

{
"server": {
"hostname": "127.0.0.1",
"port": "3042"
},
"db": {
"connectionString": "sqlite://./db.sqlite",
"graphiql": true,
"openapi": true,
"graphql": true
}
}
+ + + + \ No newline at end of file diff --git a/docs/0.41.2/reference/db/introduction/index.html b/docs/0.41.2/reference/db/introduction/index.html new file mode 100644 index 00000000000..0766faa1ca2 --- /dev/null +++ b/docs/0.41.2/reference/db/introduction/index.html @@ -0,0 +1,25 @@ + + + + + +Platformatic DB | Platformatic Open Source Software + + + + + +
+
Version: 0.41.2

Platformatic DB

Platformatic DB is an HTTP server that provides a flexible set of tools for building robust APIs with Node.js.

For a high level overview of how Platformatic DB works, please reference the Architecture guide.

info

Platformatic DB is currently in public beta.

Features

info

Get up and running in 2 minutes using our Quick Start Guide

Supported databases

DatabaseVersion
SQLite3.
PostgreSQL>= 15
MySQL>= 5.7
MariaDB>= 10.11

The required database driver is automatically inferred and loaded based on the value of the connectionString configuration setting.

Public beta

Platformatic DB is in public beta. You can use it in production, but it's quite likely that you'll encounter significant bugs.

If you run into a bug or have a suggestion for improvement, please raise an issue on GitHub.

+ + + + \ No newline at end of file diff --git a/docs/0.41.2/reference/db/logging/index.html b/docs/0.41.2/reference/db/logging/index.html new file mode 100644 index 00000000000..f6560a88606 --- /dev/null +++ b/docs/0.41.2/reference/db/logging/index.html @@ -0,0 +1,25 @@ + + + + + +Logging | Platformatic Open Source Software + + + + + +
+
Version: 0.41.2

Logging

Platformatic DB uses a low overhead logger named Pino to output structured log messages.

Logger output level

By default the logger output level is set to info, meaning that all log messages with a level of info or above will be output by the logger. See the Pino documentation for details on the supported log levels.

The logger output level can be overridden by adding a logger object to the server configuration settings group:

platformatic.db.json
{
"server": {
"logger": {
"level": "error"
},
...
},
...
}

Log formatting

If you run Platformatic DB in a terminal, where standard out (stdout) is a TTY:

  • pino-pretty is automatically used +to pretty print the logs and make them easier to read during development.
  • The Platformatic logo is printed (if colors are supported in the terminal emulator)

Example:

$ npx platformatic db start




/////////////
///// /////
/// ///
/// ///
/// ///
&& /// /// &&
&&&&&& /// /// &&&&&&
&&&& /// /// &&&&
&&& /// /// &&&&&&&&&&&&
&&& /// /////// //// && &&&&&
&& /// /////////////// &&&
&&& /// /// &&&
&&& /// // &&
&&& /// &&
&&& /// &&&
&&&& /// &&&
&&&&& /// &&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&
///
///
///
///
///
///


[11:20:33.466] INFO (337606): server listening
url: "http://127.0.0.1:3042"

If stdout is redirected to a non-TTY, the logo is not printed and the logs are formatted as newline-delimited JSON:

$ npx platformatic db start | head
{"level":30,"time":1665566628973,"pid":338365,"hostname":"darkav2","url":"http://127.0.0.1:3042","msg":"server listening"}
+ + + + \ No newline at end of file diff --git a/docs/0.41.2/reference/db/migrations/index.html b/docs/0.41.2/reference/db/migrations/index.html new file mode 100644 index 00000000000..7bb15c1aa6f --- /dev/null +++ b/docs/0.41.2/reference/db/migrations/index.html @@ -0,0 +1,17 @@ + + + + + +Migrations | Platformatic Open Source Software + + + + + +
+
Version: 0.41.2

Migrations

It uses Postgrator under the hood to run migrations. Please refer to the Postgrator documentation for guidance on writing migration files.

In brief, you should create a file structure like this

migrations/
|- 001.do.sql
|- 001.undo.sql
|- 002.do.sql
|- 002.undo.sql
|- 003.do.sql
|- 003.undo.sql
|- 004.do.sql
|- 004.undo.sql
|- ... and so on

Postgrator uses a table in your schema, to store which migrations have been already processed, so that only new ones will be applied at every server start.

You can always rollback some migrations specifying what version you would like to rollback to.

Example

$ platformatic db migrations apply --to 002

Will execute 004.undo.sql, 003.undo.sql in this order. If you keep those files in migrations directory, when the server restarts it will execute 003.do.sql and 004.do.sql in this order if the autoApply value is true, or you can run the db migrations apply command.

It's also possible to rollback a single migration with -r:

$ platformatic db migrations apply -r 

How to run migrations

There are two ways to run migrations in Platformatic DB. They can be processed automatically when the server starts if the autoApply value is true, or you can just run the db migrations apply command.

In both cases you have to edit your config file to tell Platformatic DB where your migration files are located.

Automatically on server start

To run migrations when Platformatic DB starts, you need to use the config file root property migrations.

There are two options in the "migrations" property

  • dir (required) the directory where the migration files are located. It will be relative to the config file path.
  • autoApply a boolean value that tells Platformatic DB to auto-apply migrations or not (default: false)

Example

{
...
"migrations": {
"dir": "./path/to/migrations/folder",
"autoApply": false
}
}

Manually with the CLI

See documentation about db migrations apply command

In short:

  • be sure to define a correct migrations.dir folder under the config on platformatic.db.json
  • get the MIGRATION_NUMBER (f.e. if the file is named 002.do.sql will be 002)
  • run npx platformatic db migrations apply --to MIGRATION_NUMBER
+ + + + \ No newline at end of file diff --git a/docs/0.41.2/reference/db/plugin/index.html b/docs/0.41.2/reference/db/plugin/index.html new file mode 100644 index 00000000000..4e1c1a568d6 --- /dev/null +++ b/docs/0.41.2/reference/db/plugin/index.html @@ -0,0 +1,19 @@ + + + + + +Plugin | Platformatic Open Source Software + + + + + +
+
Version: 0.41.2

Plugin

If you want to extend Platformatic DB features, it is possible to register a plugin, which will be in the form of a standard Fastify plugin.

The config file will specify where the plugin file is located as the example below:

{
...
"plugins": {
"paths": ["./plugin/index.js"]
}
}

The paths are relative to the config file path.

Once the config file is set up, you can write your plugin to extend Platformatic DB API or write your custom business logic.

You should export an async function which receives the following parameters:

  • app (FastifyInstance) that is the main fastify instance running Platformatic DB
  • opts all the options specified in the config file after path
  • You can always access Platformatic data mapper through app.platformatic property.
info

To make sure that a user has the appropriate set of permissions to perform any action on an entity the context should be passed to the entity mapper operation like this:

app.post('/', async (req, reply) => {
const ctx = req.createPlatformaticCtx()

await app.platformatic.entities.movies.find({
where: { /*...*/ },
ctx
})
})

Check some examples.

Hot Reload

Plugin file is being watched by fs.watch function.

You don't need to reload Platformatic DB server while working on your plugin. Every time you save, the watcher will trigger a reload event and the server will auto-restart and load your updated code.

tip

At this time, on Linux, file watch in subdirectories is not supported due to a Node.js limitation (documented here).

Directories

The path can also be a directory. In that case, the directory will be loaded with @fastify/autoload.

Consider the following directory structure:

├── routes
│ ├── foo
│ │ ├── something.js
│ │ └── bar
│ │ └── baz.js
│ ├── single-plugin
│ │ └── utils.js
│ └── another-plugin.js
└── platformatic.service.json

By default the folder will be added as a prefix to all the routes defined within them. See the autoload documentation for all the options to customize this behavior.

Multiple plugins

Multiple plugins can be loaded in parallel by specifying an array:

{
...
"plugins": {
"paths": [{
"path": "./plugin/index.js"
}, {
"path": "./routes/"
}]
}
}

fastify.swagger()

TypeScript and autocompletion

If you want to access any of the types provided by Platformatic DB, generate them using the platformatic db types command. This will create a global.d.ts file that you can now import everywhere, like so:

/// <reference types="./global.d.ts" />

Remember to adjust the path to global.d.ts.

Plugin definition with TypeScript

Here is an example of writing a plugin in TypeScript:

/// <reference types="./global.d.ts" />
import { FastifyInstance, FastifyPluginOptions } from 'fastify'

export default async function (fastify: FastifyInstance, opts: FastifyPluginOptions) {
}

Note that you need to add the "plugins": { "typescript": true } configuration to your platformatic.service.json.

+ + + + \ No newline at end of file diff --git a/docs/0.41.2/reference/db/programmatic/index.html b/docs/0.41.2/reference/db/programmatic/index.html new file mode 100644 index 00000000000..d2b44298bc0 --- /dev/null +++ b/docs/0.41.2/reference/db/programmatic/index.html @@ -0,0 +1,17 @@ + + + + + +Programmatic API | Platformatic Open Source Software + + + + + +
+
Version: 0.41.2

Programmatic API

It's possible to start an instance of Platformatic DB from JavaScript.

import { buildServer } from '@platformatic/db'

const app = await buildServer('/path/to/platformatic.db.json')

await app.start() // this will start our server

console.log('URL', app.url)

const res = await fetch(app.url)
console.log(await res.json())

// do something

await app.close()

It is also possible to customize the configuration:

import { buildServer } from '@platformatic/db'

const app = await buildServer({
server: {
hostname: '127.0.0.1',
port: 0
},
db: {
connectionString: 'sqlite://test.sqlite'
},
})

await app.start() // this will start our server

console.log('URL', app.url)

const res = await fetch(app.url)
console.log(await res.json())

// do something

await app.close()

For more details on how this is implemented, read Platformatic Service Programmatic API.

API

buildServer(config)

Returns an instance of the restartable application

RestartableApp

.start()

Listen to the hostname/port combination specified in the config.

.restart()

Restart the Fastify application

.close()

Stops the application.

+ + + + \ No newline at end of file diff --git a/docs/0.41.2/reference/db/schema-support/index.html b/docs/0.41.2/reference/db/schema-support/index.html new file mode 100644 index 00000000000..721b8fdef1d --- /dev/null +++ b/docs/0.41.2/reference/db/schema-support/index.html @@ -0,0 +1,21 @@ + + + + + +Schema support | Platformatic Open Source Software + + + + + +
+
Version: 0.41.2

Schema support

It's possible to specify the schemas where the tables are located (if the database supports schemas). Platformatic DB will inspect these schemas to create the entities.

Example

CREATE SCHEMA IF NOT EXISTS "test1";
CREATE TABLE IF NOT EXISTS test1.movies (
id INTEGER PRIMARY KEY,
title TEXT NOT NULL
);

CREATE SCHEMA IF NOT EXISTS "test2";
CREATE TABLE IF NOT EXISTS test2.users (
id INTEGER PRIMARY KEY,
title TEXT NOT NULL
);

The schemas must be specified in configuration in the schema section. Note that if we use schemas and migrations, we must specify the schema in the migrations table as well (with postgresql, we assume we use the default public schema).

  ...
"db": {
"connectionString": "(...)",
"schema": [
"test1", "test2"
],
"ignore": {
"versions": true
}
},
"migrations": {
"dir": "migrations",
"table": "test1.versions"
},

...

The entity names are then generated in the form schemaName + entityName, PascalCase (this is necessary to avoid name collisions in case there are tables with the same name in different schemas). So for instance for the example above we generate the Test1Movie and Test2User entities.

info

Please pay attention to the entity names when using schema, these are also used to setup authorization rules

+ + + + \ No newline at end of file diff --git a/docs/0.41.2/reference/runtime/configuration/index.html b/docs/0.41.2/reference/runtime/configuration/index.html new file mode 100644 index 00000000000..142138aca7f --- /dev/null +++ b/docs/0.41.2/reference/runtime/configuration/index.html @@ -0,0 +1,67 @@ + + + + + +Configuration | Platformatic Open Source Software + + + + + +
+
Version: 0.41.2

Configuration

Platformatic Runtime is configured with a configuration file. It supports the use of environment variables as setting values with configuration placeholders.

Configuration file

If the Platformatic CLI finds a file in the current working directory matching +one of these filenames, it will automatically load it:

  • platformatic.runtime.json
  • platformatic.runtime.json5
  • platformatic.runtime.yml or platformatic.runtime.yaml
  • platformatic.runtime.tml or platformatic.runtime.toml

Alternatively, a --config option with a configuration +filepath can be passed to most platformatic runtime CLI commands.

The configuration examples in this reference use JSON.

Supported formats

FormatExtensions
JSON.json
JSON5.json5
YAML.yml, .yaml
TOML.tml

Comments are supported by the JSON5, YAML and TOML file formats.

Settings

Configuration settings are organized into the following groups:

Configuration settings containing sensitive data should be set using +configuration placeholders.

The autoload and services settings can be used together, but at least one +of them must be provided. When the configuration file is parsed, autoload +configuration is translated into services configuration.

autoload

The autoload configuration is intended to be used with monorepo applications. autoload is an object with the following settings:

  • path (required, string) - The path to a directory containing the microservices to load. In a traditional monorepo application, this directory is typically named packages.
  • exclude (array of strings) - Child directories inside of path that should not be processed.
  • mappings (object) - Each microservice is given an ID and is expected to have a Platformatic configuration file. By default the ID is the microservice's directory name, and the configuration file is expected to be a well-known Platformatic configuration file. mappings can be used to override these default values.
    • id (required, string) - The overridden ID. This becomes the new microservice ID.
    • config (required, string) - The overridden configuration file name. This is the file that will be used when starting the microservice.

services

services is an array of objects that defines the microservices managed by the +runtime. Each service object supports the following settings:

  • id (required, string) - A unique identifier for the microservice. +When working with the Platformatic Composer, this value corresponds to the id +property of each object in the services section of the config file. When +working with client objects, this corresponds to the optional serviceId +property or the name field in the client's package.json file if a +serviceId is not explicitly provided.
  • path (required, string) - The path to the directory containing +the microservice.
  • config (required, string) - The configuration file used to start +the microservice.

entrypoint

The Platformatic Runtime's entrypoint is a microservice that is exposed +publicly. This value must be the ID of a service defined via the autoload or +services configuration.

hotReload

An optional boolean, defaulting to false, indicating if hot reloading should +be enabled for the runtime. If this value is set to false, it will disable +hot reloading for any microservices managed by the runtime. If this value is +true, hot reloading for individual microservices is managed by the +configuration of that microservice.

danger

While hot reloading is useful for development, it is not recommended for use in +production.

allowCycles

An optional boolean, defaulting to false, indicating if dependency cycles +are allowed between microservices managed by the runtime. When the Platformatic +Runtime parses the provided configuration, it examines the clients of each +microservice, as well as the services of Platformatic Composer applications to +build a dependency graph. A topological sort is performed on this dependency +graph so that each service is started after all of its dependencies have been +started. If there are cycles, the topological sort fails and the Runtime does +not start any applications.

If allowCycles is true, the topological sort is skipped, and the +microservices are started in the order specified in the configuration file.

telemetry

Open Telemetry is optionally supported with these settings:

  • serviceName (required, string) — Name of the service as will be reported in open telemetry. In the runtime case, the name of the services as reported in traces is ${serviceName}-${serviceId}, where serviceId is the id of the service in the runtime.
  • version (string) — Optional version (free form)
  • skip (array). Optional list of operations to skip when exporting telemetry defined object with properties:
    • method: GET, POST, PUT, DELETE, PATCH, HEAD, OPTIONS, TRACE
    • path. e.g.: /documentation/json
  • exporter (object or array) — Exporter configuration. If not defined, the exporter defaults to console. If an array of objects is configured, every object must be a valid exporter object. The exporter object has the following properties:
    • type (string) — Exporter type. Supported values are console, otlp, zipkin and memory (default: console). memory is only supported for testing purposes.
    • options (object) — These options are supported:
      • url (string) — The URL to send the telemetry to. Required for otlp exporter. This has no effect on console and memory exporters.
      • headers (object) — Optional headers to send with the telemetry. This has no effect on console and memory exporters.

Note that OTLP traces can be consumed by different solutions, like Jaeger. Here the full list.

Example

{
"telemetry": {
"serviceName": "test-service",
"exporter": {
"type": "otlp",
"options": {
"url": "http://localhost:4318/v1/traces"
}
}
}
}

Environment variable placeholders

The value for any configuration setting can be replaced with an environment +variable by adding a placeholder in the configuration file, for example +{PLT_ENTRYPOINT}.

All placeholders in a configuration must be available as an environment +variable and must meet the +allowed placeholder name rules.

Setting environment variables

If a .env file exists it will automatically be loaded by Platformatic using +dotenv. For example:

.env
PLT_ENTRYPOINT=service

The .env file must be located in the same folder as the Platformatic +configuration file or in the current working directory.

Environment variables can also be set directly on the command line, for example:

PLT_ENTRYPOINT=service npx platformatic runtime

Allowed placeholder names

Only placeholder names prefixed with PLT_, or that are in this allow list, +will be dynamically replaced in the configuration file:

  • PORT
  • DATABASE_URL

This restriction is to avoid accidentally exposing system environment variables. +An error will be raised by Platformatic if it finds a configuration placeholder +that isn't allowed.

The default allow list can be extended by passing a --allow-env CLI option +with a comma separated list of strings, for example:

npx platformatic runtime --allow-env=HOST,SERVER_LOGGER_LEVEL

If --allow-env is passed as an option to the CLI, it will be merged with the +default allow list.

+ + + + \ No newline at end of file diff --git a/docs/0.41.2/reference/runtime/introduction/index.html b/docs/0.41.2/reference/runtime/introduction/index.html new file mode 100644 index 00000000000..bd6253b8742 --- /dev/null +++ b/docs/0.41.2/reference/runtime/introduction/index.html @@ -0,0 +1,33 @@ + + + + + +Platformatic Runtime | Platformatic Open Source Software + + + + + +
+
Version: 0.41.2

Platformatic Runtime

Platformatic Runtime is an environment for running multiple Platformatic +microservices as a single monolithic deployment unit.

info

Platformatic Runtime is currently in public beta.

Features

Public beta

Platformatic Runtime is in public beta. You can use it in production, but it's quite +likely that you'll encounter significant bugs.

If you run into a bug or have a suggestion for improvement, please +raise an issue on GitHub.

Standalone usage

If you're only interested in the features available in Platformatic Runtime, you can replace platformatic with @platformatic/runtime in the dependencies of your package.json, so that you'll import fewer deps.

Example configuration file

The following configuration file can be used to start a new Platformatic +Runtime project. For more details on the configuration file, see the +configuration documentation.

{
"$schema": "https://platformatic.dev/schemas/v0.26.0/runtime",
"autoload": {
"path": "./packages",
"exclude": ["docs"]
},
"entrypoint": "entrypointApp"
}

TypeScript Compilation

Platformatic Runtime streamlines the compilation of all services built on TypeScript with the command +plt runtime compile. The TypeScript compiler (tsc) is required to be installed separately.

Interservice communication

The Platformatic Runtime allows multiple microservice applications to run +within a single process. Only the entrypoint binds to an operating system +port and can be reached from outside of the runtime.

Within the runtime, all interservice communication happens by injecting HTTP +requests into the running servers, without binding them to ports. This injection +is handled by +fastify-undici-dispatcher.

Each microservice is assigned an internal domain name based on its unique ID. +For example, a microservice with the ID awesome is given the internal domain +of http://awesome.plt.local. The fastify-undici-dispatcher module maps that +domain to the Fastify server running the awesome microservice. Any Node.js +APIs based on Undici, such as fetch(), will then automatically route requests +addressed to awesome.plt.local to the corresponding Fastify server.

+ + + + \ No newline at end of file diff --git a/docs/0.41.2/reference/runtime/programmatic/index.html b/docs/0.41.2/reference/runtime/programmatic/index.html new file mode 100644 index 00000000000..b9272532dfd --- /dev/null +++ b/docs/0.41.2/reference/runtime/programmatic/index.html @@ -0,0 +1,28 @@ + + + + + +Programmatic API | Platformatic Open Source Software + + + + + +
+
Version: 0.41.2

Programmatic API

In many cases it's useful to start Platformatic applications using an API +instead of the command line. The @platformatic/runtime API makes it simple to +work with different application types (e.g. service, db, composer and runtime) without +needing to know the application type a priori.

buildServer()

The buildServer function creates a server from a provided configuration +object or configuration filename. +The config can be of either Platformatic Service, Platformatic DB, +Platformatic Composer or any other application built on top of +Platformatic Service.

import { buildServer } from '@platformatic/runtime'

const app = await buildServer('path/to/platformatic.runtime.json')
const entrypointUrl = await app.start()

// Make a request to the entrypoint.
const res = await fetch(entrypointUrl)
console.log(await res.json())

// Do other interesting things.

await app.close()

It is also possible to customize the configuration:

import { buildServer } from '@platformatic/runtime'

const config = {
// $schema: 'https://platformatic.dev/schemas/v0.39.0/runtime',
// $schema: 'https://platformatic.dev/schemas/v0.39.0/service',
// $schema: 'https://platformatic.dev/schemas/v0.39.0/db',
// $schema: 'https://platformatic.dev/schemas/v0.39.0/composer'
...
}
const app = await buildServer(config)

await app.start()

loadConfig()

The loadConfig function is used to read and parse a configuration file for +an arbitrary Platformatic application.

import { loadConfig } from '@platformatic/runtime'

// Read the config based on command line arguments. loadConfig() will detect
// the application type.
const config = await loadConfig({}, ['-c', '/path/to/platformatic.config.json'])

// Read the config based on command line arguments. The application type can
// be provided explicitly.
const config = await loadConfig(
{},
['-c', '/path/to/platformatic.config.json']
)

// Default config can be specified.
const config = await loadConfig(
{},
['-c', '/path/to/platformatic.config.json'],
{ key: 'value' }
)

start()

The start function loads a configuration, builds a server, and starts the +server. However, the server is not returned.

import { start } from '@platformatic/runtime'

await start(['-c', '/path/to/platformatic.config.json'])

startCommand()

The startCommand function is similar to start. However, if an exception +occurs, startCommand logs the error and exits the process. This is different +from start, which throws the exception.

import { startCommand } from '@platformatic/runtime'

await startCommand(['-c', '/path/to/platformatic.config.json'])
+ + + + \ No newline at end of file diff --git a/docs/0.41.2/reference/service/configuration/index.html b/docs/0.41.2/reference/service/configuration/index.html new file mode 100644 index 00000000000..0ec484323fa --- /dev/null +++ b/docs/0.41.2/reference/service/configuration/index.html @@ -0,0 +1,38 @@ + + + + + +Configuration | Platformatic Open Source Software + + + + + +
+
Version: 0.41.2

Configuration

Platformatic Service is configured with a configuration file. It supports the use of environment variables as setting values with configuration placeholders.

Configuration file

If the Platformatic CLI finds a file in the current working directory matching +one of these filenames, it will automatically load it:

  • platformatic.service.json
  • platformatic.service.json5
  • platformatic.service.yml or platformatic.service.yaml
  • platformatic.service.tml or platformatic.service.toml

Alternatively, a --config option with a configuration +filepath can be passed to most platformatic service CLI commands.

The configuration examples in this reference use JSON.

Supported formats

FormatExtensions
JSON.json
JSON5.json5
YAML.yml, .yaml
TOML.tml, .toml

Comments are supported by the JSON5, YAML and TOML file formats.

Settings

Configuration settings are organised into the following groups:

Sensitive configuration settings, such as a database connection URL that contains +a password, should be set using configuration placeholders.

server

A required object with the following settings:

  • hostname (required, string) — Hostname where Platformatic Service server will listen for connections.

  • port (required, number) — Port where Platformatic Service server will listen for connections.

  • healthCheck (boolean or object) — Enables the health check endpoint.

    • Powered by @fastify/under-pressure.
    • The value can be an object, used to specify the interval between checks in milliseconds (default: 5000)

    Example

    {
    "server": {
    ...
    "healthCheck": {
    "interval": 2000
    }
    }
    }
  • cors (object) — Configuration for Cross-Origin Resource Sharing (CORS) headers.

    • All options will be passed to the @fastify/cors plugin. In order to specify a RegExp object, you can pass { regexp: 'yourregexp' }, +it will be automatically converted.
  • logger (object) -- the logger configuration.

  • pluginTimeout (integer) -- the number of milliseconds to wait for a Fastify plugin to load, see the fastify docs for more details.

  • https (object) - Configuration for HTTPS supporting the following options.

    • key (required, string, object, or array) - If key is a string, it specifies the private key to be used. If key is an object, it must have a path property specifying the private key file. Multiple keys are supported by passing an array of keys.
    • cert (required, string, object, or array) - If cert is a string, it specifies the certificate to be used. If cert is an object, it must have a path property specifying the certificate file. Multiple certificates are supported by passing an array of keys.

metrics

Configuration for a Prometheus server that will export monitoring metrics +for the current server instance. It uses fastify-metrics +under the hood.

This setting can be a boolean or an object. If set to true the Prometheus server will listen on http://0.0.0.0:9090.

Supported object properties:

  • hostname (string) — The hostname where Prometheus server will listen for connections.
  • port (number) — The port where Prometheus server will listen for connections.
  • auth (object) — Basic Auth configuration. username and password are required here +(use environment variables).

plugins

An optional object that defines the plugins loaded by Platformatic Service.

  • paths (required, array): an array of paths (string) +or an array of objects composed as follows,
    • path (string): Relative path to plugin's entry point.
    • options (object): Optional plugin options.
  • encapsulate (boolean): if the path is a folder, it instructs Platformatic to not encapsulate those plugins.
    • maxDepth (integer): if the path is a folder, it limits the depth to load the content from.
  • typescript (boolean or object): enable TypeScript compilation. A tsconfig.json file is required in the same folder.

Example

{
"plugins": {
"paths": [{
"path": "./my-plugin.js",
"options": {
"foo": "bar"
}
}]
}
}

typescript compilation options

The typescript can also be an object to customize the compilation. Here are the supported options:

  • enabled (boolean): enables compilation
  • tsConfig (string): path to the tsconfig.json file relative to the configuration
  • outDir (string): the output directory of tsconfig.json, in case tsconfig.json is not available and enabled is set to false (production build)
  • flags (array of string): flags to be passed to tsc. Overrides tsConfig. +

Example:

{
"plugins": {
"paths": [{
"path": "./my-plugin.js",
"options": {
"foo": "bar"
}
}],
"typescript": {
"enabled": false,
"tsConfig": "./path/to/tsconfig.json",
"outDir": "dist"
}
}
}

watch

Disable watching for file changes if set to false. It can also be customized with the following options:

  • ignore (string[], default: null): List of glob patterns to ignore when watching for changes. If null or not specified, ignore rule is not applied. Ignore option doesn't work for typescript files.

  • allow (string[], default: ['*.js', '**/*.js']): List of glob patterns to allow when watching for changes. If null or not specified, allow rule is not applied. Allow option doesn't work for typescript files.

    Example

    {
    "watch": {
    "ignore": ["*.mjs", "**/*.mjs"],
    "allow": ["my-plugin.js", "plugins/*.js"]
    }
    }

service

Configure @platformatic/service specific settings such as graphql or openapi:

  • graphql (boolean or object, default: false) — Controls the GraphQL API interface, with optional GraphiQL UI.

    Examples

    Enables GraphQL support

    {
    "service": {
    "graphql": true
    }
    }

    Enables GraphQL support with GraphiQL

    {
    "service": {
    "graphql": {
    "graphiql": true
    }
    }
    }
  • openapi (boolean or object, default: false) — Enables OpenAPI REST support.

    • If value is an object, all OpenAPI v3 allowed properties can be passed. Also a prefix property can be passed to set the OpenAPI prefix.
    • Platformatic Service uses @fastify/swagger under the hood to manage this configuration.

    Examples

    Enables OpenAPI

    {
    "service": {
    ...
    "openapi": true
    }
    }

    Enables OpenAPI with prefix

    {
    "service": {
    "openapi": {
    "prefix": "/api"
    }
    }
    }

    Enables OpenAPI with options

    {
    "service": {
    "openapi": {
    "info": {
    "title": "Platformatic Service",
    "description": "Exposing a SQL database as REST"
    }
    }
    }
    }

telemetry

Open Telemetry is optionally supported with these settings:

  • serviceName (required, string) — Name of the service as will be reported in open telemetry.
  • version (string) — Optional version (free form)
  • skip (array). Optional list of operations to skip when exporting telemetry defined object with properties:
    • method: GET, POST, PUT, DELETE, PATCH, HEAD, OPTIONS, TRACE
    • path. e.g.: /documentation/json
  • exporter (object or array) — Exporter configuration. If not defined, the exporter defaults to console. If an array of objects is configured, every object must be a valid exporter object. The exporter object has the following properties:
    • type (string) — Exporter type. Supported values are console, otlp, zipkin and memory (default: console). memory is only supported for testing purposes.
    • options (object) — These options are supported:
      • url (string) — The URL to send the telemetry to. Required for otlp exporter. This has no effect on console and memory exporters.
      • headers (object) — Optional headers to send with the telemetry. This has no effect on console and memory exporters.

Note that OTLP traces can be consumed by different solutions, like Jaeger. Here the full list.

Example

{
"telemetry": {
"serviceName": "test-service",
"exporter": {
"type": "otlp",
"options": {
"url": "http://localhost:4318/v1/traces"
}
}
}
}

Environment variable placeholders

The value for any configuration setting can be replaced with an environment variable +by adding a placeholder in the configuration file, for example {PLT_SERVER_LOGGER_LEVEL}.

All placeholders in a configuration must be available as an environment variable +and must meet the allowed placeholder name rules.

Example

platformatic.service.json
{
"server": {
"port": "{PORT}"
}
}

Platformatic will replace the placeholders in this example with the environment +variables of the same name.

Setting environment variables

If a .env file exists it will automatically be loaded by Platformatic using +dotenv. For example:

.env
PLT_SERVER_LOGGER_LEVEL=info
PORT=8080

The .env file must be located in the same folder as the Platformatic configuration +file or in the current working directory.

Environment variables can also be set directly on the command line, for example:

PLT_SERVER_LOGGER_LEVEL=debug npx platformatic service

Allowed placeholder names

Only placeholder names prefixed with PLT_, or that are in this allow list, will be +dynamically replaced in the configuration file:

  • PORT
  • DATABASE_URL

This restriction is to avoid accidentally exposing system environment variables. +An error will be raised by Platformatic if it finds a configuration placeholder +that isn't allowed.

The default allow list can be extended by passing a --allow-env CLI option with a +comma separated list of strings, for example:

npx platformatic service --allow-env=HOST,SERVER_LOGGER_LEVEL

If --allow-env is passed as an option to the CLI, it will be merged with the +default allow list.

+ + + + \ No newline at end of file diff --git a/docs/0.41.2/reference/service/introduction/index.html b/docs/0.41.2/reference/service/introduction/index.html new file mode 100644 index 00000000000..491f4a263af --- /dev/null +++ b/docs/0.41.2/reference/service/introduction/index.html @@ -0,0 +1,21 @@ + + + + + +Platformatic Service | Platformatic Open Source Software + + + + + +
+
Version: 0.41.2

Platformatic Service

Platformatic Service is an HTTP server that provides developer tools for building robust APIs with Node.js.

For a high level overview of how Platformatic DB works, please reference the +Architecture guide.

info

Platformatic Service is currently in public beta.

Features

Public beta

Platformatic Service is in public beta. You can use it in production, but it's quite +likely that you'll encounter significant bugs.

If you run into a bug or have a suggestion for improvement, please +raise an issue on GitHub.

Standalone usage

If you're only interested in the features available in Platformatic Service, you can simply switch platformatic with @platformatic/service in the dependencies of your package.json, so that you'll only import fewer deps.

You can use the plt-service command, it's the equivalent of plt service.

+ + + + \ No newline at end of file diff --git a/docs/0.41.2/reference/service/plugin/index.html b/docs/0.41.2/reference/service/plugin/index.html new file mode 100644 index 00000000000..9b810896a89 --- /dev/null +++ b/docs/0.41.2/reference/service/plugin/index.html @@ -0,0 +1,21 @@ + + + + + +Plugin | Platformatic Open Source Software + + + + + +
+
Version: 0.41.2

Plugin

If you want to add features to a service, you will need to register a plugin, which will be in the form of a standard Fastify plugin.

The config file will specify where the plugin file is located as the example below:

{
...
"plugins": {
"paths": ["./plugin/index.js"]
}
}

The path is relative to the config file path.

You should export an async function which receives the following parameters:

  • app (FastifyInstance) that is the main fastify instance
  • opts all the options specified in the config file after path

Hot Reload

Plugin file is being watched by fs.watch function.

You don't need to reload Platformatic Service server while working on your plugin. Every time you save, the watcher will trigger a reload event and the server will auto-restart and load your updated code.

tip

At this time, on Linux, file watch in subdirectories is not supported due to a Node.js limitation (documented here).

Directories

The path can also be a directory. In that case, the directory will be loaded with @fastify/autoload.

Consider the following directory structure:

├── routes
│ ├── foo
│ │ ├── something.js
│ │ └── bar
│ │ └── baz.js
│ ├── single-plugin
│ │ └── utils.js
│ └── another-plugin.js
└── platformatic.service.json

By default the folder will be added as a prefix to all the routes defined within them. +See the autoload documentation for all the options to customize this behavior.

Multiple plugins

Multiple plugins can be loaded in parallel by specifying an array:

{
...
"plugins": {
"paths": [{
"path": "./plugin/index.js"
}, {
"path": "./routes/"
}]
}
}

TypeScript and Autocompletion

In order to provide the correct typings of the features added by Platformatic Service to your Fastify instance, +add the following at the top of your files:

/// <reference types="@platformatic/service" />

Plugin definition with TypeScript

Here is an example of writing a plugin in TypeScript:

/// <reference types="@platformatic/service" />
import { FastifyInstance, FastifyPluginOptions } from 'fastify'

export default async function (fastify: FastifyInstance, opts: FastifyPluginOptions) {
}

Note that you need to add the "typescript": true configuration to your platformatic.service.json.

Loading compiled files

Setting "typescript": false but including a tsconfig.json with an outDir +option, will instruct Platformatic Service to try loading your plugins from that folder instead. +This setup is needed to support pre-compiled sources to reduce cold start time during deployment.

+ + + + \ No newline at end of file diff --git a/docs/0.41.2/reference/service/programmatic/index.html b/docs/0.41.2/reference/service/programmatic/index.html new file mode 100644 index 00000000000..92abb20de45 --- /dev/null +++ b/docs/0.41.2/reference/service/programmatic/index.html @@ -0,0 +1,19 @@ + + + + + +Programmatic API | Platformatic Open Source Software + + + + + +
+
Version: 0.41.2

Programmatic API

In many cases it's useful to start Platformatic Service using an API instead of +command line, e.g. in tests we want to start and stop our server.

The buildServer function allows that:

import { buildServer } from '@platformatic/service'

const app = await buildServer('path/to/platformatic.service.json')

await app.start()

const res = await fetch(app.url)
console.log(await res.json())

// do something

await app.close()

It is also possible to customize the configuration:

import { buildServer } from '@platformatic/service'

const app = await buildServer({
server: {
hostname: '127.0.0.1',
port: 0
}
})

await app.start()

const res = await fetch(app.url)
console.log(await res.json())

// do something

await app.close()

Creating a reusable application on top of Platformatic Service

Platformatic DB is built on top of Platformatic Service. If you want to build a similar kind of tool, follow this example:

import { buildServer, schema, platformaticService } from '@platformatic/service'

async function myPlugin (app, opts) {
// app.platformatic.configManager contains an instance of the ConfigManager
console.log(app.platformatic.configManager.current)

await platformaticService(app, opts)
}

// break Fastify encapsulation
myPlugin[Symbol.for('skip-override')] = true
myPlugin.configType = 'myPlugin'

// This is the schema for this reusable application configuration file,
// customize at will but retain the base properties of the schema from
// @platformatic/service
myPlugin.schema = schema

// The configuration of the ConfigManager
myPlugin.configManagerConfig = {
schema: myPlugin.schema,
envWhitelist: ['PORT', 'HOSTNAME'],
allowToWatch: ['.env'],
schemaOptions: {
useDefaults: true,
coerceTypes: true,
allErrors: true,
strict: false
},
async transformConfig () {
console.log(this.current) // this is the current config

// In this method you can alter the configuration before the application
// is started. It's useful to apply some defaults that cannot be derived
// inside the schema, such as resolving paths.
}
}


const server = await buildServer('path/to/config.json', myPlugin)

await server.start()

const res = await fetch(server.listeningOrigin)
console.log(await res.json())

// do something

await server.close()
+ + + + \ No newline at end of file diff --git a/docs/0.41.2/reference/sql-events/fastify-plugin/index.html b/docs/0.41.2/reference/sql-events/fastify-plugin/index.html new file mode 100644 index 00000000000..f42ae3be600 --- /dev/null +++ b/docs/0.41.2/reference/sql-events/fastify-plugin/index.html @@ -0,0 +1,19 @@ + + + + + +Fastify Plugin | Platformatic Open Source Software + + + + + +
+
Version: 0.41.2

Fastify Plugin

The @platformatic/sql-events package exports a Fastify plugin that can be used out-of the box in a server application. +It requires that @platformatic/sql-mapper is registered before it.

The plugin has the following options:

The plugin adds the following properties to the app.platformatic object:

  • mq — an instance of mqemitter
  • subscribe(topics) — a method to create a node Readable +that will contain the events emitted by those topics.

Each entities of app.platformatic.entities will be augmented with two functions:

  • entity.getPublishTopic({ ctx, data, action })
  • entity.getSubscriptionTopic({ ctx, action })

Where ctx is the GraphQL Context, data is the object that will be emitted and action is either save or delete.

Usage

'use strict'

const Fastify = require('fastify')
const mapper = require('@platformatic/sql-mapper')
const events = require('@platformatic/sql-events')

async function main() {
const app = Fastify({
logger: {
level: 'info'
}
})
app.register(mapper.plugin, {
connectionString: 'postgres://postgres:postgres@127.0.0.1/postgres'
})

app.register(events)

// setup your routes


await app.listen({ port: 3333 })
}

main()
+ + + + \ No newline at end of file diff --git a/docs/0.41.2/reference/sql-events/introduction/index.html b/docs/0.41.2/reference/sql-events/introduction/index.html new file mode 100644 index 00000000000..a76645cd913 --- /dev/null +++ b/docs/0.41.2/reference/sql-events/introduction/index.html @@ -0,0 +1,21 @@ + + + + + +Introduction to the sql-events module | Platformatic Open Source Software + + + + + +
+
Version: 0.41.2

Introduction to the sql-events module

The Platformatic DB sql-events uses mqemitter to publish events when entities are saved and deleted.

These events are useful to distribute updates to clients, e.g. via WebSocket, Server-Sent Events, or GraphQL Subscriptions. When subscribing and using a multi-process system with a broker like Redis, a subscribed topic will receive the data from all the other processes.

They are not the right choice for executing some code whenever an entity is created, modified or deleted, in that case +use @platformatic/sql-mapper hooks.

Install

You can use it together with @platformatic/sql-mapper.

npm i @platformatic/sql-mapper @platformatic/sql-events

Usage

const { connect } = require('@platformatic/sql-mapper')
const { setupEmitter } = require('@platformatic/sql-events')
const { pino } = require('pino')

const log = pino()

async function onDatabaseLoad (db, sql) {
await db.query(sql`CREATE TABLE pages (
id SERIAL PRIMARY KEY,
title VARCHAR(255) NOT NULL
);`)
}
const connectionString = 'postgres://postgres:postgres@localhost:5432/postgres'
const mapper = await connect({
connectionString,
log,
onDatabaseLoad,
ignore: {},
hooks: {
Page: {
find: async function(_find, opts) {
console.log('hook called');
return await _find(opts)
}
}
}
})

setupEmitter({ mapper, log })

const pageEntity = mapper.entities.page

const queue = await mapper.subscribe([
pageEntity.getSubscriptionTopic({ action: 'save' }),
pageEntity.getSubscriptionTopic({ action: 'delete' })
])

const page = await pageEntity.save({
input: { title: 'fourth page' }
})

const page2 = await pageEntity.save({
input: {
id: page.id,
title: 'fifth page'
}
})

await pageEntity.delete({
where: {
id: {
eq: page.id
}
},
fields: ['id', 'title']
})

for await (const ev of queue) {
console.log(ev)
if (expected.length === 0) {
break
}
}

process.exit(0)

API

The setupEmitter function has the following options:

The setupEmitter functions adds the following properties to the mapper object:

  • mq — an instance of mqemitter
  • subscribe(topics) — a method to create a node Readable +that will contain the events emitted by those topics.

Each entities of app.platformatic.entities will be augmented with two functions:

  • entity.getPublishTopic({ ctx, data, action })
  • entity.getSubscriptionTopic({ ctx, action })

Where ctx is the GraphQL Context, data is the object that will be emitted and action is either save or delete.

+ + + + \ No newline at end of file diff --git a/docs/0.41.2/reference/sql-graphql/ignore/index.html b/docs/0.41.2/reference/sql-graphql/ignore/index.html new file mode 100644 index 00000000000..11cd4089b2d --- /dev/null +++ b/docs/0.41.2/reference/sql-graphql/ignore/index.html @@ -0,0 +1,17 @@ + + + + + +Ignoring types and fields | Platformatic Open Source Software + + + + + + + + + + \ No newline at end of file diff --git a/docs/0.41.2/reference/sql-graphql/introduction/index.html b/docs/0.41.2/reference/sql-graphql/introduction/index.html new file mode 100644 index 00000000000..6bbf7470842 --- /dev/null +++ b/docs/0.41.2/reference/sql-graphql/introduction/index.html @@ -0,0 +1,21 @@ + + + + + +Introduction to the GraphQL API | Platformatic Open Source Software + + + + + +
+
Version: 0.41.2

Introduction to the GraphQL API

The Platformatic DB GraphQL plugin starts a GraphQL server and makes it available via a /graphql endpoint. This endpoint is automatically ready to run queries and mutations against your entities. This functionality is powered by Mercurius.

GraphiQL

The GraphiQL web UI is integrated into +Platformatic DB. To enable it you can pass an option to the sql-graphql plugin:

app.register(graphqlPlugin, { graphiql: true })

The GraphiQL interface is made available under the /graphiql path.

+ + + + \ No newline at end of file diff --git a/docs/0.41.2/reference/sql-graphql/many-to-many/index.html b/docs/0.41.2/reference/sql-graphql/many-to-many/index.html new file mode 100644 index 00000000000..38bbb3a5d7c --- /dev/null +++ b/docs/0.41.2/reference/sql-graphql/many-to-many/index.html @@ -0,0 +1,20 @@ + + + + + +Many To Many Relationship | Platformatic Open Source Software + + + + + +
+
Version: 0.41.2

Many To Many Relationship

Many-to-Many relationship lets you relate each row in one table to many rows in +another table and vice versa.

Many-to-many relationship are implemented in SQL via a "join table", a table whose primary key +is composed by the identifier of the two parts of the many-to-many relationship.

Platformatic DB fully supports many-to-many relationships on all supported databases.

Example

Consider the following schema (SQLite):

CREATE TABLE pages (
id INTEGER PRIMARY KEY,
the_title VARCHAR(42)
);

CREATE TABLE users (
id INTEGER PRIMARY KEY,
username VARCHAR(255) NOT NULL
);

CREATE TABLE editors (
page_id INTEGER NOT NULL,
user_id INTEGER NOT NULL,
role VARCHAR(255) NOT NULL,
CONSTRAINT fk_editor_pages FOREIGN KEY (page_id) REFERENCES pages(id),
CONSTRAINT fk_editor_users FOREIGN KEY (user_id) REFERENCES users(id),
PRIMARY KEY (page_id, user_id)
);

The table editors is a "join table" between users and pages. +Given this schema, you could issue queries like:

query {
editors(orderBy: { field: role, direction: DESC }) {
user {
id
username
}
page {
id
theTitle
}
role
}
}

Mutation works exactly the same as before:

mutation {
saveEditor(input: { userId: "1", pageId: "1", role: "captain" }) {
user {
id
username
}
page {
id
theTitle
}
role
}
}
+ + + + \ No newline at end of file diff --git a/docs/0.41.2/reference/sql-graphql/mutations/index.html b/docs/0.41.2/reference/sql-graphql/mutations/index.html new file mode 100644 index 00000000000..b79fbfe04e4 --- /dev/null +++ b/docs/0.41.2/reference/sql-graphql/mutations/index.html @@ -0,0 +1,20 @@ + + + + + +Mutations | Platformatic Open Source Software + + + + + +
+
Version: 0.41.2

Mutations

When the GraphQL plugin is loaded, some mutations are automatically added to the GraphQL schema.

save[ENTITY]

Saves a new entity to the database or updates an existing entity. +This actually behaves as an upsert, allowing both behaviours depending on the presence of the primary key field.

Example

'use strict'

const Fastify = require('fastify')
const graphqlPlugin = require('@platformatic/sql-graphql')
const sqlMapper = require('@platformatic/sql-mapper')

async function main() {
const app = Fastify({
logger: {
level: 'info'
}
})
app.register(sqlMapper, {
connectionString: 'postgres://postgres:postgres@127.0.0.1/postgres',
log: logger,
})
app.register(graphqlPlugin, {
graphiql: true
})
const res = await app.inject({
method: 'POST',
url: '/graphql',
body: {
query: `
mutation {
savePage(input: { id: 3 title: "Platformatic is cool!" }) {
id
title
}
}
`
}
})
const result = await res.json()
console.log(result.data) // { savePage: { id: '3', title: 'Platformatic is cool!' } }
await app.close()
}

main()

insert[ENTITY]

Inserts a new entity in the database.

Example

'use strict'

const Fastify = require('fastify')
const graphqlPlugin = require('@platformatic/sql-graphql')
const sqlMapper = require('@platformatic/sql-mapper')

async function main() {
const app = Fastify({
logger: {
level: 'info'
}
})
app.register(sqlMapper, {
connectionString: 'postgres://postgres:postgres@127.0.0.1/postgres',
log: logger,
})
app.register(graphqlPlugin, {
graphiql: true
})
const res = await app.inject({
method: 'POST',
url: '/graphql',
body: {
query: `
mutation {
savePage(input: { title: "Platformatic is cool!" }) {
id
title
}
}
`
}
})
const result = await res.json()
console.log(result.data) // { savePage: { id: '4', title: 'Platformatic is cool!' } }
await app.close()
}

main()

delete[ENTITIES]

Deletes one or more entities from the database, based on the where clause +passed as an input to the mutation.

Example

'use strict'

const Fastify = require('fastify')
const graphqlPlugin = require('@platformatic/sql-graphql')
const sqlMapper = require('@platformatic/sql-mapper')

async function main() {
const app = Fastify({
logger: {
level: 'info'
}
})
app.register(sqlMapper, {
connectionString: 'postgres://postgres:postgres@127.0.0.1/postgres',
log: logger,
})
app.register(graphqlPlugin, {
graphiql: true
})
const res = await app.inject({
method: 'POST',
url: '/graphql',
body: {
query: `
mutation {
deletePages(where: { id: { eq: "3" } }) {
id
title
}
}
`
}
})
const result = await res.json()
console.log(result.data) // { deletePages: [ { id: '3', title: 'Platformatic is cool!' } ] }
await app.close()
}

main()
+ + + + \ No newline at end of file diff --git a/docs/0.41.2/reference/sql-graphql/queries/index.html b/docs/0.41.2/reference/sql-graphql/queries/index.html new file mode 100644 index 00000000000..1b0e6e3f5ae --- /dev/null +++ b/docs/0.41.2/reference/sql-graphql/queries/index.html @@ -0,0 +1,21 @@ + + + + + +Queries | Platformatic Open Source Software + + + + + +
+
Version: 0.41.2

Queries

A GraphQL query is automatically added to the GraphQL schema for each database +table, along with a complete mapping for all table fields.

Example

'use strict'

const Fastify = require('fastify')
const graphqlPlugin = require('@platformatic/sql-graphql')
const sqlMapper = require('@platformatic/sql-mapper')
async function main() {
const app = Fastify({
logger: {
level: 'info'
}
})
app.register(sqlMapper, {
connectionString: 'postgres://postgres:postgres@127.0.0.1/postgres'
})
app.register(graphqlPlugin, {
graphiql: true
})
const res = await app.inject({
method: 'POST',
url: '/graphql',
body: {
query: `
query{
pages{
id,
title
}
}
`
}
})
const result = await res.json()
console.log(result.data)
await app.close()
}
main()

Advanced Queries

The following additional queries are added to the GraphQL schema for each entity:

get[ENTITY]by[PRIMARY_KEY]

If you have a table pages with the field id as the primary key, you can run +a query called getPageById.

Example

...
const res = await app.inject({
method: 'POST',
url: '/graphql',
body: {
query: `
query{
getPageById(id: 3) {
id,
title
}
}
`
}
})
const result = await res.json()
console.log(result.data) // { getPageById: { id: '3', title: 'A fiction' } }

count[ENTITIES]

...
const res = await app.inject({
method: 'POST',
url: '/graphql',
body: {
query: `
query {
countPages {
total
}
}
`
}
})
const result = await res.json()
console.log(result.data) // { countPages: { total: 17 } }

Pagination

Platformatic DB supports pagination of results through the input parameters: limit and offset

Example

{
users(limit:5, offset: 10) {
name
}
}

It returns 5 users starting from position 10.

Limit

By default a limit value (10) is applied to each request.

Clients can override this behavior by passing a value. In this case the server validates the input, and an error is returned if the value exceeds the maximum accepted value (100).

Limit's values can be customized through configuration:

{
...
"db": {
...
"limit": {
"default": 50,
"max": 1000
}
}
}

Limit only accepts values >= 0. Otherwise an error is returned.

Offset

By default offset is not applied to the request. +Clients can override this behavior by passing a value.

Offset only accepts values >= 0. Otherwise an error is returned.

+ + + + \ No newline at end of file diff --git a/docs/0.41.2/reference/sql-graphql/subscriptions/index.html b/docs/0.41.2/reference/sql-graphql/subscriptions/index.html new file mode 100644 index 00000000000..fdf0677f928 --- /dev/null +++ b/docs/0.41.2/reference/sql-graphql/subscriptions/index.html @@ -0,0 +1,19 @@ + + + + + +Subscription | Platformatic Open Source Software + + + + + +
+
Version: 0.41.2

Subscription

When the GraphQL plugin is loaded, some subscriptions are automatically added to the GraphQL schema if the @platformatic/sql-events plugin has been previously registered.

It's possible to avoid creating the subscriptions for a given entity by adding the subscriptionIgnore config, +like so: subscriptionIgnore: ['page'].

[ENTITY]Saved

Published whenever an entity is saved, e.g. when the mutation insert[ENTITY] or save[ENTITY] are called.

[ENTITY]Deleted

Published whenever an entity is deleted, e.g. when the mutation delete[ENTITY] is called.

+ + + + \ No newline at end of file diff --git a/docs/0.41.2/reference/sql-mapper/entities/api/index.html b/docs/0.41.2/reference/sql-mapper/entities/api/index.html new file mode 100644 index 00000000000..cc726fa9409 --- /dev/null +++ b/docs/0.41.2/reference/sql-mapper/entities/api/index.html @@ -0,0 +1,18 @@ + + + + + +API | Platformatic Open Source Software + + + + + +
+
Version: 0.41.2

API

A set of operation methods are available on each entity:

Returned fields

The entity operation methods accept a fields option that can specify an array of field names to be returned. If not specified, all fields will be returned.

Where clause

The entity operation methods accept a where option to allow limiting of the database rows that will be affected by the operation.

The where object's key is the field you want to check, the value is a key/value map where the key is an operator (see the table below) and the value is the value you want to run the operator against.

Platformatic operatorSQL operator
eq'='
in'IN'
nin'NOT IN'
neq'<>'
gt'>'
gte'>='
lt'<'
lte'<='
like'LIKE'

Examples

Selects row with id = 1

{
...
"where": {
id: {
eq: 1
}
}
}

Select all rows with id less than 100

{
...
"where": {
id: {
lt: 100
}
}
}

Select all rows with id 1, 3, 5 or 7

{
...
"where": {
id: {
in: [1, 3, 5, 7]
}
}
}

Where clause operations are by default combined with the AND operator. To combine them with the OR operator, use the or key.

Select all rows with id 1 or 3

{
...
"where": {
or: [
{
id: {
eq: 1
}
},
{
id: {
eq: 3
}
}
]
}
}

Select all rows with id 1 or 3 and title like 'foo%'

{
...
"where": {
or: [
{
id: {
eq: 1
}
},
{
id: {
eq: 3
}
}
],
title: {
like: 'foo%'
}
}
}

Reference

find

Retrieve data for an entity from the database.

Options

NameTypeDescription
fieldsArray of stringList of fields to be returned for each object
whereObjectWhere clause 🔗
orderByArray of ObjectObject like { field: 'counter', direction: 'ASC' }
limitNumberLimits the number of returned elements
offsetNumberThe offset to start looking for rows from

Usage

'use strict'

const { connect } = require('@platformatic/sql-mapper')
const { pino } = require('pino')
const pretty = require('pino-pretty')
const logger = pino(pretty())

async function main() {
const pgConnectionString = 'postgres://postgres:postgres@127.0.0.1/postgres'
const mapper = await connect({
connectionString: pgConnectionString,
log: logger,
})
const res = await mapper.entities.page.find({
fields: ['id', 'title',],
where: {
id: {
lt: 10
}
},
})
logger.info(res)
await mapper.db.dispose()
}
main()

count

Same as find, but only count entities.

Options

NameTypeDescription
whereObjectWhere clause 🔗

Usage

'use strict'

const { connect } = require('@platformatic/sql-mapper')
const { pino } = require('pino')
const pretty = require('pino-pretty')
const logger = pino(pretty())

async function main() {
const pgConnectionString = 'postgres://postgres:postgres@127.0.0.1/postgres'
const mapper = await connect({
connectionString: pgConnectionString,
log: logger,
})
const res = await mapper.entities.page.count({
where: {
id: {
lt: 10
}
},
})
logger.info(res)
await mapper.db.dispose()
}
main()

insert

Insert one or more entity rows in the database.

Options

NameTypeDescription
fieldsArray of stringList of fields to be returned for each object
inputsArray of ObjectEach object is a new row

Usage

'use strict'

const { connect } = require('@platformatic/sql-mapper')
const { pino } = require('pino')
const pretty = require('pino-pretty')
const logger = pino(pretty())

async function main() {
const pgConnectionString = 'postgres://postgres:postgres@127.0.0.1/postgres'
const mapper = await connect({
connectionString: pgConnectionString,
log: logger,
})
const res = await mapper.entities.page.insert({
fields: ['id', 'title' ],
inputs: [
{ title: 'Foobar' },
{ title: 'FizzBuzz' }
],
})
logger.info(res)
/**
0: {
"id": "16",
"title": "Foobar"
}
1: {
"id": "17",
"title": "FizzBuzz"
}
*/
await mapper.db.dispose()
}
main()

save

Create a new entity row in the database or update an existing one.

To update an existing entity, the id field (or equivalent primary key) must be included in the input object. +save actually behaves as an upsert, allowing both behaviours depending on the presence of the primary key field.

Options

NameTypeDescription
fieldsArray of stringList of fields to be returned for each object
inputObjectThe single row to create/update

Usage

'use strict'
const { connect } = require('@platformatic/sql-mapper')
const { pino } = require('pino')
const pretty = require('pino-pretty')
const logger = pino(pretty())

async function main() {
const connectionString = 'postgres://postgres:postgres@127.0.0.1/postgres'
const mapper = await connect({
connectionString: connectionString,
log: logger,
})
const res = await mapper.entities.page.save({
fields: ['id', 'title' ],
input: { id: 1, title: 'FizzBuzz' },
})
logger.info(res)
await mapper.db.dispose()
}
main()

delete

Delete one or more entity rows from the database, depending on the where option. Returns the data for all deleted objects.

Options

NameTypeDescription
fieldsArray of stringList of fields to be returned for each object
whereObjectWhere clause 🔗

Usage

'use strict'
const { connect } = require('@platformatic/sql-mapper')
const { pino } = require('pino')
const pretty = require('pino-pretty')
const logger = pino(pretty())

async function main() {
const connectionString = 'postgres://postgres:postgres@127.0.0.1/postgres'
const mapper = await connect({
connectionString: connectionString,
log: logger,
})
const res = await mapper.entities.page.delete({
fields: ['id', 'title',],
where: {
id: {
lt: 4
}
},
})
logger.info(res)
await mapper.db.dispose()
}
main()

updateMany

Update one or more entity rows from the database, depending on the where option. Returns the data for all updated objects.

Options

NameTypeDescription
whereObjectWhere clause 🔗
inputObjectThe new values you want to update
fieldsArray of stringList of fields to be returned for each object

Usage

'use strict'
const { connect } = require('@platformatic/sql-mapper')
const { pino } = require('pino')
const pretty = require('pino-pretty')
const logger = pino(pretty())

async function main() {
const connectionString = 'postgres://postgres:postgres@127.0.0.1/postgres'
const mapper = await connect({
connectionString: connectionString,
log: logger,
})
const res = await mapper.entities.page.updateMany({
fields: ['id', 'title',],
where: {
counter: {
gte: 30
}
},
input: {
title: 'Updated title'
}
})
logger.info(res)
await mapper.db.dispose()
}
main()

+ + + + \ No newline at end of file diff --git a/docs/0.41.2/reference/sql-mapper/entities/example/index.html b/docs/0.41.2/reference/sql-mapper/entities/example/index.html new file mode 100644 index 00000000000..07dec42269f --- /dev/null +++ b/docs/0.41.2/reference/sql-mapper/entities/example/index.html @@ -0,0 +1,17 @@ + + + + + +Example | Platformatic Open Source Software + + + + + +
+
Version: 0.41.2

Example

Given this PostgreSQL SQL schema:

CREATE TABLE "categories" (
"id" int4 NOT NULL DEFAULT nextval('categories_id_seq'::regclass),
"name" varchar(255) NOT NULL,
PRIMARY KEY ("id")
);

CREATE TABLE "pages" (
"id" int4 NOT NULL DEFAULT nextval('pages_id_seq'::regclass),
"title" varchar(255) NOT NULL,
"category_id" int4,
"user_id" int4,
PRIMARY KEY ("id")
);

ALTER TABLE "pages" ADD FOREIGN KEY ("category_id") REFERENCES "categories"("id");

app.platformatic.entities will contain this mapping object:

{
"category": {
"name": "Category",
"singularName": "category",
"pluralName": "categories",
"primaryKey": "id",
"table": "categories",
"fields": {
"id": {
"sqlType": "int4",
"isNullable": false,
"primaryKey": true,
"camelcase": "id"
},
"name": {
"sqlType": "varchar",
"isNullable": false,
"camelcase": "name"
}
},
"camelCasedFields": {
"id": {
"sqlType": "int4",
"isNullable": false,
"primaryKey": true,
"camelcase": "id"
},
"name": {
"sqlType": "varchar",
"isNullable": false,
"camelcase": "name"
}
},
"relations": [],
"reverseRelationships": [
{
"sourceEntity": "Page",
"relation": {
"constraint_catalog": "postgres",
"constraint_schema": "public",
"constraint_name": "pages_category_id_fkey",
"table_catalog": "postgres",
"table_schema": "public",
"table_name": "pages",
"constraint_type": "FOREIGN KEY",
"is_deferrable": "NO",
"initially_deferred": "NO",
"enforced": "YES",
"column_name": "category_id",
"ordinal_position": 1,
"position_in_unique_constraint": 1,
"foreign_table_name": "categories",
"foreign_column_name": "id"
}
}
]
},
"page": {
"name": "Page",
"singularName": "page",
"pluralName": "pages",
"primaryKey": "id",
"table": "pages",
"fields": {
"id": {
"sqlType": "int4",
"isNullable": false,
"primaryKey": true,
"camelcase": "id"
},
"title": {
"sqlType": "varchar",
"isNullable": false,
"camelcase": "title"
},
"category_id": {
"sqlType": "int4",
"isNullable": true,
"foreignKey": true,
"camelcase": "categoryId"
},
"user_id": {
"sqlType": "int4",
"isNullable": true,
"camelcase": "userId"
}
},
"camelCasedFields": {
"id": {
"sqlType": "int4",
"isNullable": false,
"primaryKey": true,
"camelcase": "id"
},
"title": {
"sqlType": "varchar",
"isNullable": false,
"camelcase": "title"
},
"categoryId": {
"sqlType": "int4",
"isNullable": true,
"foreignKey": true,
"camelcase": "categoryId"
},
"userId": {
"sqlType": "int4",
"isNullable": true,
"camelcase": "userId"
}
},
"relations": [
{
"constraint_catalog": "postgres",
"constraint_schema": "public",
"constraint_name": "pages_category_id_fkey",
"table_catalog": "postgres",
"table_schema": "public",
"table_name": "pages",
"constraint_type": "FOREIGN KEY",
"is_deferrable": "NO",
"initially_deferred": "NO",
"enforced": "YES",
"column_name": "category_id",
"ordinal_position": 1,
"position_in_unique_constraint": 1,
"foreign_table_name": "categories",
"foreign_column_name": "id"
}
],
"reverseRelationships": []
}
}
+ + + + \ No newline at end of file diff --git a/docs/0.41.2/reference/sql-mapper/entities/fields/index.html b/docs/0.41.2/reference/sql-mapper/entities/fields/index.html new file mode 100644 index 00000000000..212e6ab4e14 --- /dev/null +++ b/docs/0.41.2/reference/sql-mapper/entities/fields/index.html @@ -0,0 +1,17 @@ + + + + + +Fields | Platformatic Open Source Software + + + + + +
+
Version: 0.41.2

Fields

When Platformatic DB inspects a database's schema, it creates an object for each table that contains a mapping of their fields.

These objects contain the following properties:

  • singularName: singular entity name, based on table name. Uses inflected under the hood.
  • pluralName: plural entity name (i.e 'pages')
  • primaryKey: the field which is identified as primary key.
  • table: original table name
  • fields: an object containing all fields details. Object key is the field name.
  • camelCasedFields: an object containing all fields details in camelcase. If you have a column named user_id you can access it using both userId or user_id

Fields detail

  • sqlType: The original field type. It may vary depending on the underlying DB Engine
  • isNullable: Whether the field can be null or not
  • primaryKey: Whether the field is the primary key or not
  • camelcase: The camelcased value of the field

Example

Given this SQL Schema (for PostgreSQL):

CREATE SEQUENCE IF NOT EXISTS pages_id_seq;
CREATE TABLE "public"."pages" (
"id" int4 NOT NULL DEFAULT nextval('pages_id_seq'::regclass),
"title" varchar,
"body_content" text,
"category_id" int4,
PRIMARY KEY ("id")
);

The resulting mapping object will be:

{
singularName: 'page',
pluralName: 'pages',
primaryKey: 'id',
table: 'pages',
fields: {
id: {
sqlType: 'int4',
isNullable: false,
primaryKey: true,
camelcase: 'id'
},
title: {
sqlType: 'varchar',
isNullable: true,
camelcase: 'title'
},
body_content: {
sqlType: 'text',
isNullable: true,
camelcase: 'bodyContent'
},
category_id: {
sqlType: 'int4',
isNullable: true,
foreignKey: true,
camelcase: 'categoryId'
}
}
camelCasedFields: {
id: {
sqlType: 'int4',
isNullable: false,
primaryKey: true,
camelcase: 'id'
},
title: {
sqlType: 'varchar',
isNullable: true,
camelcase: 'title'
},
bodyContent: {
sqlType: 'text',
isNullable: true,
camelcase: 'bodyContent'
},
categoryId: {
sqlType: 'int4',
isNullable: true,
foreignKey: true,
camelcase: 'categoryId'
}
},
relations: []
}
+ + + + \ No newline at end of file diff --git a/docs/0.41.2/reference/sql-mapper/entities/hooks/index.html b/docs/0.41.2/reference/sql-mapper/entities/hooks/index.html new file mode 100644 index 00000000000..56ebd7e2c4b --- /dev/null +++ b/docs/0.41.2/reference/sql-mapper/entities/hooks/index.html @@ -0,0 +1,17 @@ + + + + + +Hooks | Platformatic Open Source Software + + + + + +
+
Version: 0.41.2

Hooks

Entity hooks are a way to wrap the API methods for an entity and add custom behaviour.

The Platformatic DB SQL Mapper provides an addEntityHooks(entityName, spec) function that can be used to add hooks for an entity.

How to use hooks

addEntityHooks accepts two arguments:

  1. A string representing the entity name (singularized), for example 'page'.
  2. A key/value object where the key is one of the API methods (find, insert, save, delete) and the value is a callback function. The callback will be called with the original API method and the options that were passed to that method. See the example below.

Usage

'use strict'
const { connect } = require('@platformatic/sql-mapper')
const { pino } = require('pino')
const pretty = require('pino-pretty')
const logger = pino(pretty())

async function main() {
const pgConnectionString = 'postgres://postgres:postgres@127.0.0.1/postgres'
const mapper = await connect({
connectionString: pgConnectionString,
log: logger,
})
mapper.addEntityHooks('page', {
find: async (originalFind, opts) => {
// Add a `foo` field with `bar` value to each row
const res = await originalFind(opts)
return res.map((row) => {
row.foo = 'bar'
return row
})
}
})
const res = await mapper.entities.page.find({
fields: ['id', 'title',],
where: {
id: {
lt: 10
}
},
})
logger.info(res)
/**
[
0: {
"id": "5",
"title": "Page 1",
"foo": "bar"
},
1: {
"id": "6",
"title": "Page 2",
"foo": "bar"
}
]
*/
await mapper.db.dispose()
}
main()

Multiple Hooks

Multiple hooks can be added for the same entity and API method, for example:

'use strict'
const { connect } = require('@platformatic/sql-mapper')
const { pino } = require('pino')
const pretty = require('pino-pretty')
const logger = pino(pretty())

async function main() {
const pgConnectionString = 'postgres://postgres:postgres@127.0.0.1/postgres'
const mapper = await connect({
connectionString: pgConnectionString,
log: logger,
})
mapper.addEntityHooks('page', {
find: async function firstHook(previousFunction, opts) {
// Add a `foo` field with `bar` value to each row
const res = await previousFunction(opts)
return res.map((row) => {
row.foo = 'bar'
return row
})
}
})
mapper.addEntityHooks('page', {
find: async function secondHook(previousFunction, opts) {
// Add a `bar` field with `baz` value to each row
const res = await previousFunction(opts)
return res.map((row) => {
row.bar = 'baz'
return row
})
}
})
const res = await mapper.entities.page.find({
fields: ['id', 'title',],
where: {
id: {
lt: 10
}
},
})
logger.info(res)
/**
[
0: {
"id": "5",
"title": "Page 1",
"foo": "bar",
"bar": "baz"
},
1: {
"id": "6",
"title": "Page 2",
"foo": "bar",
"bar": "baz"
}
]
*/
await mapper.db.dispose()
}
main()

Since hooks are wrappers, they are being called in reverse order, like the image below

Hooks Lifecycle

So even though we defined two hooks, the Database will be hit only once.

Query result will be processed by firstHook, which will pass the result to secondHook, which will, finally, send the processed result to the original .find({...}) function.

+ + + + \ No newline at end of file diff --git a/docs/0.41.2/reference/sql-mapper/entities/introduction/index.html b/docs/0.41.2/reference/sql-mapper/entities/introduction/index.html new file mode 100644 index 00000000000..f25eef50683 --- /dev/null +++ b/docs/0.41.2/reference/sql-mapper/entities/introduction/index.html @@ -0,0 +1,17 @@ + + + + + +Introduction to Entities | Platformatic Open Source Software + + + + + +
+
Version: 0.41.2

Introduction to Entities

The primary goal of Platformatic DB is to read a database schema and generate REST and GraphQL endpoints that enable the execution of CRUD (Create/Retrieve/Update/Delete) operations against the database.

Platformatic DB includes a mapper that reads the schemas of database tables and then generates an entity object for each table.

Platformatic DB is a Fastify application. The Fastify instance object is decorated with the platformatic property, which exposes several APIs that handle the manipulation of data in the database.

Platformatic DB populates the app.platformatic.entities object with data found in database tables.

The keys on the entities object are singularized versions of the table names — for example users becomes user, categories becomes category — and the values are a set of associated metadata and functions.

+ + + + \ No newline at end of file diff --git a/docs/0.41.2/reference/sql-mapper/entities/relations/index.html b/docs/0.41.2/reference/sql-mapper/entities/relations/index.html new file mode 100644 index 00000000000..a920c7de1c2 --- /dev/null +++ b/docs/0.41.2/reference/sql-mapper/entities/relations/index.html @@ -0,0 +1,20 @@ + + + + + +Relations | Platformatic Open Source Software + + + + + +
+
Version: 0.41.2

Relations

When Platformatic DB is reading your database schema, it identifies relationships +between tables and stores metadata on them in the entity object's relations field. +This is achieved by querying the database's internal metadata.

Example

Given this PostgreSQL schema:

CREATE SEQUENCE IF NOT EXISTS categories_id_seq;

CREATE TABLE "categories" (
"id" int4 NOT NULL DEFAULT nextval('categories_id_seq'::regclass),
"name" varchar(255) NOT NULL,
PRIMARY KEY ("id")
);

CREATE SEQUENCE IF NOT EXISTS pages_id_seq;

CREATE TABLE "pages" (
"id" int4 NOT NULL DEFAULT nextval('pages_id_seq'::regclass),
"title" varchar(255) NOT NULL,
"body_content" text,
"category_id" int4,
PRIMARY KEY ("id")
);

ALTER TABLE "pages" ADD FOREIGN KEY ("category_id") REFERENCES "categories"("id");

When this code is run:

'use strict'
const { connect } = require('@platformatic/sql-mapper')
const { pino } = require('pino')
const pretty = require('pino-pretty')
const logger = pino(pretty())

async function main() {
const pgConnectionString = 'postgres://postgres:postgres@127.0.0.1/postgres'
const mapper = await connect({
connectionString: pgConnectionString,
log: logger,
})
const pageEntity = mapper.entities.page
console.log(pageEntity.relations)
await mapper.db.dispose()
}
main()

The output will be:

[
{
constraint_catalog: 'postgres',
constraint_schema: 'public',
constraint_name: 'pages_category_id_fkey',
table_catalog: 'postgres',
table_schema: 'public',
table_name: 'pages',
constraint_type: 'FOREIGN KEY',
is_deferrable: 'NO',
initially_deferred: 'NO',
enforced: 'YES',
column_name: 'category_id',
ordinal_position: 1,
position_in_unique_constraint: 1,
foreign_table_name: 'categories',
foreign_column_name: 'id'
}
]

As Platformatic DB supports multiple database engines, the contents of the +relations object will vary depending on the database being used.

The following relations fields are common to all database engines:

  • column_name — the column that stores the foreign key
  • foreign_table_name — the table hosting the related row
  • foreign_column_name — the column in foreign table that identifies the row
+ + + + \ No newline at end of file diff --git a/docs/0.41.2/reference/sql-mapper/entities/timestamps/index.html b/docs/0.41.2/reference/sql-mapper/entities/timestamps/index.html new file mode 100644 index 00000000000..6dffb250797 --- /dev/null +++ b/docs/0.41.2/reference/sql-mapper/entities/timestamps/index.html @@ -0,0 +1,17 @@ + + + + + +Timestamps | Platformatic Open Source Software + + + + + +
+
Version: 0.41.2

Timestamps

Timestamps can be used to automatically set the created_at and updated_at fields on your entities.

Timestamps are enabled by default

Configuration

To disable timestamps, you need to set the autoTimestamp field to false in configuration file:

{
...
"db": {
"connectionString": "postgres://postgres:postgres@127.0.0.1/postgres",
"autoTimestamp": false
},
...
}

Customizing the field names

By default, the created_at and updated_at fields are used. You can customize the field names by setting the createdAt and updatedAt options in autoTimestamp field in configuration file:

{
...
"db": {
"connectionString": "postgres://postgres:postgres@127.0.0.1/postgres",
"autoTimestamp": {
"createdAt": "inserted_at",
"updatedAt": "updated_at"
}
...
}
+ + + + \ No newline at end of file diff --git a/docs/0.41.2/reference/sql-mapper/entities/transactions/index.html b/docs/0.41.2/reference/sql-mapper/entities/transactions/index.html new file mode 100644 index 00000000000..e5aaf6298cb --- /dev/null +++ b/docs/0.41.2/reference/sql-mapper/entities/transactions/index.html @@ -0,0 +1,18 @@ + + + + + +Transactions | Platformatic Open Source Software + + + + + +
+
Version: 0.41.2

Transactions

Platformatic DB entities support transactions through the optional tx parameter. If the tx parameter is provided, the entity will join the transaction, e.g.:


const { connect } = require('@platformatic/sql-mapper')
const logger = pino(pretty())

async function main() {
const pgConnectionString = 'postgres://postgres:postgres@127.0.0.1/postgres'
const { db, entities} = await connect({
connectionString: pgConnectionString,
log: logger,
})

const result = await db.tx(async tx => {
// these two operations will be executed in the same transaction
const authorResult = await entities.author.save({
fields: ['id', 'name'],
input: { name: 'test'},
tx
})
const res = await entities.page.save({
fields: ['title', 'authorId'],
input: { title: 'page title', authorId: authorResult.id },
tx
})
return res
})

}

Throwing an Error triggers a transaction rollback:

    try {
await db.tx(async tx => {
await entities.page.save({
input: { title: 'new page' },
fields: ['title'],
tx
})

// here we have `new page`
const findResult = await entities.page.find({ fields: ['title'], tx })

// (...)

// We force the rollback
throw new Error('rollback')
})
} catch (e) {
// rollback
}

// no 'new page' here...
const afterRollback = await entities.page.find({ fields: ['title'] })

+ + + + \ No newline at end of file diff --git a/docs/0.41.2/reference/sql-mapper/fastify-plugin/index.html b/docs/0.41.2/reference/sql-mapper/fastify-plugin/index.html new file mode 100644 index 00000000000..9a2d523b51a --- /dev/null +++ b/docs/0.41.2/reference/sql-mapper/fastify-plugin/index.html @@ -0,0 +1,17 @@ + + + + + +sql-mapper Fastify Plugin | Platformatic Open Source Software + + + + + +
+
Version: 0.41.2

sql-mapper Fastify Plugin

The @platformatic/sql-mapper package exports a Fastify plugin that can be used out of the box in a server application.

A connectionString option must be passed to connect to your database.

The plugin decorates the server with a platformatic object that has the following properties:

  • db — the DB wrapper object provided by @databases
  • sql — the SQL query mapper object provided by @databases
  • entities — all entity objects with their API methods
  • addEntityHooks — a function to add a hook to an entity API method.

The plugin also decorates the Fastify Request object with the following:

  • platformaticContext: an object with the following two properties:
    • app, the Fastify application of the given route
    • reply, the Fastify Reply instance matching that request

Usage

'use strict'

const Fastify = require('fastify')
const mapper = require('@platformatic/sql-mapper')

async function main() {
const app = Fastify({
logger: {
level: 'info'
}
})
app.register(mapper.plugin, {
connectionString: 'postgres://postgres:postgres@127.0.0.1/postgres'
})

app.get('/all-pages', async (req, reply) => {
// Optionally get the platformatic context.
// Passing this to all sql-mapper functions allow to apply
// authorization rules to the database queries (amongst other things).
const ctx = req.platformaticContext

// Will return all rows from 'pages' table
const res = await app.platformatic.entities.page.find({ ctx })
return res
})

await app.listen({ port: 3333 })
}

main()
+ + + + \ No newline at end of file diff --git a/docs/0.41.2/reference/sql-mapper/introduction/index.html b/docs/0.41.2/reference/sql-mapper/introduction/index.html new file mode 100644 index 00000000000..d9acfef6075 --- /dev/null +++ b/docs/0.41.2/reference/sql-mapper/introduction/index.html @@ -0,0 +1,19 @@ + + + + + +Introduction to @platformatic/sql-mapper | Platformatic Open Source Software + + + + + +
+
Version: 0.41.2

Introduction to @platformatic/sql-mapper

@platformatic/sql-mapper is the underlying utility that Platformatic DB uses to create useful utilities to manipulate your SQL database using JavaScript.

This module is bundled with Platformatic DB via a Fastify plugin. The rest of this guide shows how to use this module directly.

Install

npm i @platformatic/sql-mapper

API

connect(opts) : Promise

It will inspect a database schema and return an object containing:

  • db — A database abstraction layer from @databases
  • sql — The SQL builder from @databases
  • entities — An object containing a key for each table found in the schema, with basic CRUD operations. See Entity Reference for details.

The valid options are:

  • connectionString — The Database connection string
  • poolSize - Maximum number of connections in the connection pool. Defaults to 10.
  • log — A logger object (like Pino)
  • onDatabaseLoad — An async function that is called after the connection is established. It will receive db and sql as parameter.
  • ignore — Object used to ignore some tables from building entities. (i.e. { 'versions': true } will ignore versions table)
  • autoTimestamp — Generate timestamp automatically when inserting/updating records.
  • hooks — For each entity name (like Page) you can customize any of the entity API function. Your custom function will receive the original function as first parameter, and then all the other parameters passed to it.

createConnectionPool(opts) : Promise

It will inspect a database schema and return an object containing:

The valid options are:

  • connectionString — The Database connection string
  • poolSize - Maximum number of connections in the connection pool. Defaults to 10.
  • log — A logger object (like Pino)

This utility is useful if you just need to connect to the db without generating any entity.

Code samples

const { connect } = require('@platformatic/sql-mapper')
const { pino } = require('pino')

const logger = pino()

async function onDatabaseLoad (db, sql) {
await db.query(sql`CREATE TABLE pages (
id SERIAL PRIMARY KEY,
title VARCHAR(255) NOT NULL
);`)
}
const connectionString =
'postgres://postgres:postgres@localhost:5432/postgres'
const mapper = await connect({
connectionString,
log: logger,
onDatabaseLoad,
ignore: {},
hooks: {
Page: {
find: async function(_find, opts) {
console.log('hook called');
return await _find(opts)
}
}
}
})
const pageEntity = mapper.entities.page

await mapper.db.query(mapper.sql`SELECT * FROM pages`)
await mapper.db.find('option1', 'option2')
+ + + + \ No newline at end of file diff --git a/docs/0.41.2/reference/sql-openapi/api/index.html b/docs/0.41.2/reference/sql-openapi/api/index.html new file mode 100644 index 00000000000..c07ecdec716 --- /dev/null +++ b/docs/0.41.2/reference/sql-openapi/api/index.html @@ -0,0 +1,22 @@ + + + + + +API | Platformatic Open Source Software + + + + + +
+
Version: 0.41.2

API

Each table is mapped to an entity named after the table's name.

In the following reference we'll use some placeholders, but let's see an example

Example

Given this SQL executed against your database:

CREATE TABLE pages (
id SERIAL PRIMARY KEY,
title VARCHAR(255) NOT NULL,
body TEXT NOT NULL
);
  • [PLURAL_ENTITY_NAME] is pages
  • [SINGULAR_ENTITY_NAME] is page
  • [PRIMARY_KEY] is id
  • fields are id, title, body

GET and POST parameters

Some APIs need the GET method, where parameters must be defined in the URL, or the POST/PUT methods, where parameters can be defined in the HTTP request payload.

Fields

Every API can define a fields parameter, representing the entity fields you want to get back for each row of the table. If not specified all fields are returned.

The fields parameter is always sent in the query string, even for POST, PUT and DELETE requests, as a comma-separated value.

GET /[PLURAL_ENTITY_NAME]

Return all entities matching where clause

Where clause

You can define many WHERE clauses in REST API, each clause includes a field, an operator and a value.

The field is one of the fields found in the schema.

The operator follows this table:

Platformatic operatorSQL operator
eq'='
in'IN'
nin'NOT IN'
neq'<>'
gt'>'
gte'>='
lt'<'
lte'<='

The value is the value you want to compare the field to.

For GET requests all these clauses are specified in the query string using the format where.[FIELD].[OPERATOR]=[VALUE]

Example

If you want to get the title and the body of every page where id < 15 you can make an HTTP request like this:

$ curl -X 'GET' \
'http://localhost:3042/pages/?fields=body,title&where.id.lt=15' \
-H 'accept: application/json'

Where clause operations are by default combined with the AND operator. To create an OR condition use the where.or query param.

Each where.or query param can contain multiple conditions separated by a | (pipe).

The where.or conditions are similar to the where conditions, except that they don't have the where prefix.

Example

If you want to get the posts where counter = 10 OR counter > 30 you can make an HTTP request like this:

$ curl -X 'GET' \
'http://localhost:3042/pages/?where.or=(counter.eq=10|counter.gte=30)' \
-H 'accept: application/json'

OrderBy clause

You can define the ordering of the returned rows within your REST API calls with the orderby clause using the following pattern:

?orderby.[field]=[asc | desc]

The field is one of the fields found in the schema. The value can be asc or desc.

Example

If you want to get the pages ordered alphabetically by their titles you can make an HTTP request like this:

$ curl -X 'GET' \
'http://localhost:3042/pages?orderby.title=asc' \
-H 'accept: application/json'

Total Count

If totalCount boolean is true in query, the GET returns the total number of elements in the X-Total-Count header ignoring limit and offset (if specified).

$ curl -v -X 'GET' \
'http://localhost:3042/pages/?limit=2&offset=0&totalCount=true' \
-H 'accept: application/json'

(...)
> HTTP/1.1 200 OK
> x-total-count: 18
(...)

[{"id":1,"title":"Movie1"},{"id":2,"title":"Movie2"}]%

POST [PLURAL_ENTITY_NAME]

Creates a new row in table. Expects fields to be sent in a JSON formatted request body.

Example

$ curl -X 'POST' \
'http://localhost:3042/pages/' \
-H 'accept: application/json' \
-H 'Content-Type: application/json' \
-d '{
"title": "Hello World",
"body": "Welcome to Platformatic!"
}'

{
"id": 1,
"title": "Hello World",
"body": "Welcome to Platformatic!"
}

GET [PLURAL_ENTITY_NAME]/[PRIMARY_KEY]

Returns a single row, identified by PRIMARY_KEY.

Example

$ curl -X 'GET' 'http://localhost:3042/pages/1?fields=title,body'

{
"title": "Hello World",
"body": "Welcome to Platformatic!"
}

POST [PLURAL_ENTITY_NAME]/[PRIMARY_KEY]

Updates a row identified by PRIMARY_KEY.

Example

$ curl -X 'POST' \
'http://localhost:3042/pages/1' \
-H 'accept: application/json' \
-H 'Content-Type: application/json' \
-d '{
"title": "Hello Platformatic!",
"body": "Welcome to Platformatic!"
}'

{
"id": 1,
"title": "Hello Platformatic!",
"body": "Welcome to Platformatic!"
}

PUT [PLURAL_ENTITY_NAME]/[PRIMARY_KEY]

Same as POST [PLURAL_ENTITY_NAME]/[PRIMARY_KEY].

PUT [PLURAL_ENTITY_NAME]

Updates all entities matching where clause

Example

$ curl -X 'PUT' \
'http://localhost:3042/pages?where.id.in=1,2' \
-H 'accept: application/json' \
-H 'Content-Type: application/json' \
-d '{
"title": "Updated title!",
"body": "Updated body!"
}'

[{
"id": 1,
"title": "Updated title!",
"body": "Updated body!"
},{
"id": 2,
"title": "Updated title!",
"body": "Updated body!"
}]

DELETE [PLURAL_ENTITY_NAME]/[PRIMARY_KEY]

Deletes a row identified by the PRIMARY_KEY.

Example

$ curl -X 'DELETE' 'http://localhost:3042/pages/1?fields=title'

{
"title": "Hello Platformatic!"
}

Nested Relationships

Let's consider the following SQL:

CREATE TABLE IF NOT EXISTS movies (
movie_id INTEGER PRIMARY KEY,
title TEXT NOT NULL
);
CREATE TABLE IF NOT EXISTS quotes (
id INTEGER PRIMARY KEY,
quote TEXT NOT NULL,
movie_id INTEGER NOT NULL REFERENCES movies(movie_id)
);

And:

  • [P_PARENT_ENTITY] is movies
  • [S_PARENT_ENTITY] is movie
  • [P_CHILDREN_ENTITY] is quotes
  • [S_CHILDREN_ENTITY] is quote

In this case, more APIs are available:

GET [P_PARENT_ENTITY]/[PARENT_PRIMARY_KEY]/[P_CHILDREN_ENTITY]

Given a 1-to-many relationship, where a parent entity can have many children, you can query for the children directly.

$ curl -X 'GET' 'http://localhost:3042/movies/1/quotes?fields=quote'

[
{
"quote": "I'll be back"
},
{
"quote": "Hasta la vista, baby"
}
]

GET [P_CHILDREN_ENTITY]/[CHILDREN_PRIMARY_KEY]/[S_PARENT_ENTITY]

You can query for the parent directly, e.g.:

$ curl -X 'GET' 'http://localhost:3042/quotes/1/movie?fields=title'

{
"title": "Terminator"
}

Many-to-Many Relationships

Many-to-Many relationship lets you relate each row in one table to many rows in +another table and vice versa.

Many-to-many relationship are implemented in SQL via a "join table", a table whose primary key +is composed by the identifier of the two parts of the many-to-many relationship.

Platformatic DB fully supports many-to-many relationships on all supported databases.

Let's consider the following SQL:

CREATE TABLE pages (
id INTEGER PRIMARY KEY,
the_title VARCHAR(42)
);

CREATE TABLE users (
id INTEGER PRIMARY KEY,
username VARCHAR(255) NOT NULL
);

CREATE TABLE editors (
page_id INTEGER NOT NULL,
user_id INTEGER NOT NULL,
role VARCHAR(255) NOT NULL,
CONSTRAINT fk_editor_pages FOREIGN KEY (page_id) REFERENCES pages(id),
CONSTRAINT fk_editor_users FOREIGN KEY (user_id) REFERENCES users(id),
PRIMARY KEY (page_id, user_id)
);

And:

  • [P_ENTITY] is editors
  • [P_REL_1] is pages
  • [S_REL_1] is page
  • [KEY_REL_1] is pages PRIMARY KEY: pages(id)
  • [P_REL_2] is users
  • [S_REL_2] is user
  • [KEY_REL_2] is users PRIMARY KEY: users(id)

In this case, here are the APIs that are available for the join table:

GET [P_ENTITY]/[S_REL_1]/[KEY_REL_1]/[S_REL_2]/[KEY_REL_2]

This returns the entity in the "join table", e.g. GET /editors/page/1/user/1.

POST [P_ENTITY]/[S_REL_1]/[KEY_REL_1]/[S_REL_2]/[KEY_REL_2]

Creates a new entity in the "join table", e.g. POST /editors/page/1/user/1.

PUT [P_ENTITY]/[S_REL_1]/[KEY_REL_1]/[S_REL_2]/[KEY_REL_2]

Updates an entity in the "join table", e.g. PUT /editors/page/1/user/1.

DELETE [P_ENTITY]/[S_REL_1]/[KEY_REL_1]/[S_REL_2]/[KEY_REL_2]

Delete the entity in the "join table", e.g. DELETE /editors/page/1/user/1.

GET /[P_ENTITY]

See the above.

Offset only accepts values >= 0. Otherwise an error is returned.

Pagination

Platformatic DB supports pagination of results through the input parameters limit and offset.

Example

$ curl -X 'GET' 'http://localhost:3042/movies?limit=5&offset=10'

[
{
"title": "Star Wars",
"movie_id": 10
},
...
{
"title": "007",
"movie_id": 14
}
]

It returns 5 movies starting from position 10.

TotalCount functionality can be used in order to evaluate if there are more pages.

Limit

By default a limit value (10) is applied to each request.

Clients can override this behavior by passing a value. In this case the server validates the input and an error is returned if it exceeds the max accepted value (100).

Limit's values can be customized through configuration:

{
...
"db": {
...
"limit": {
"default": 50,
"max": 1000
}
}
}

Limit only accepts values >= 0. Otherwise an error is returned.

Offset

By default offset is not applied to the request. Clients can override this behavior by passing a value.

Offset only accepts values >= 0. Otherwise an error is returned.

+ + + + \ No newline at end of file diff --git a/docs/0.41.2/reference/sql-openapi/ignore/index.html b/docs/0.41.2/reference/sql-openapi/ignore/index.html new file mode 100644 index 00000000000..29f966fd207 --- /dev/null +++ b/docs/0.41.2/reference/sql-openapi/ignore/index.html @@ -0,0 +1,17 @@ + + + + + +Ignoring entities and fields | Platformatic Open Source Software + + + + + +
+
Version: 0.41.2

Ignoring entities and fields

@platformatic/sql-openapi allows you to selectively ignore entities and fields.

To ignore entities:

app.register(require('@platformatic/sql-openapi'), {
ignore: {
categories: true
}
})

To ignore individual fields:

app.register(require('@platformatic/sql-openapi'), {
ignore: {
categories: {
name: true
}
}
})
+ + + + \ No newline at end of file diff --git a/docs/0.41.2/reference/sql-openapi/introduction/index.html b/docs/0.41.2/reference/sql-openapi/introduction/index.html new file mode 100644 index 00000000000..ac443a7cb7b --- /dev/null +++ b/docs/0.41.2/reference/sql-openapi/introduction/index.html @@ -0,0 +1,17 @@ + + + + + +Introduction to the REST API | Platformatic Open Source Software + + + + + +
+
Version: 0.41.2

Introduction to the REST API

The Platformatic DB OpenAPI plugin automatically starts a REST API server (powered by Fastify) that provides CRUD (Create, Read, Update, Delete) functionality for each entity.

Configuration

In the config file, under the "db" section, the OpenAPI server is enabled by default. You can disable it by setting the property openapi to false.

Example

{
...
"db": {
"openapi": false
}
}

As Platformatic DB uses fastify-swagger under the hood, the "openapi" property can be an object that follows the OpenAPI Specification Object format.

This allows you to extend the output of the Swagger UI documentation.

+ + + + \ No newline at end of file diff --git a/docs/0.41.3/category/getting-started/index.html b/docs/0.41.3/category/getting-started/index.html new file mode 100644 index 00000000000..795ca417693 --- /dev/null +++ b/docs/0.41.3/category/getting-started/index.html @@ -0,0 +1,17 @@ + + + + + +Getting Started | Platformatic Open Source Software + + + + + + + + + + \ No newline at end of file diff --git a/docs/0.41.3/category/guides/index.html b/docs/0.41.3/category/guides/index.html new file mode 100644 index 00000000000..6c2fa4a7936 --- /dev/null +++ b/docs/0.41.3/category/guides/index.html @@ -0,0 +1,17 @@ + + + + + +Guides | Platformatic Open Source Software + + + + + +
+
Version: 0.41.3

Guides

+ + + + \ No newline at end of file diff --git a/docs/0.41.3/category/packages/index.html b/docs/0.41.3/category/packages/index.html new file mode 100644 index 00000000000..7d8357b9dce --- /dev/null +++ b/docs/0.41.3/category/packages/index.html @@ -0,0 +1,17 @@ + + + + + +Packages | Platformatic Open Source Software + + + + + + + + + + \ No newline at end of file diff --git a/docs/0.41.3/category/platformatic-cloud/index.html b/docs/0.41.3/category/platformatic-cloud/index.html new file mode 100644 index 00000000000..9461ea4a78c --- /dev/null +++ b/docs/0.41.3/category/platformatic-cloud/index.html @@ -0,0 +1,17 @@ + + + + + +Platformatic Cloud | Platformatic Open Source Software + + + + + + + + + + \ No newline at end of file diff --git a/docs/0.41.3/category/reference/index.html b/docs/0.41.3/category/reference/index.html new file mode 100644 index 00000000000..001218e27b3 --- /dev/null +++ b/docs/0.41.3/category/reference/index.html @@ -0,0 +1,17 @@ + + + + + +Reference | Platformatic Open Source Software + + + + + + + + + + \ No newline at end of file diff --git a/docs/0.41.3/contributing/documentation-style-guide/index.html b/docs/0.41.3/contributing/documentation-style-guide/index.html new file mode 100644 index 00000000000..809b1d30068 --- /dev/null +++ b/docs/0.41.3/contributing/documentation-style-guide/index.html @@ -0,0 +1,74 @@ + + + + + +Documentation Style Guide | Platformatic Open Source Software + + + + + +
+
Version: 0.41.3

Documentation Style Guide

Welcome to the Platformatic Documentation Style Guide. This guide is here to provide +you with a conventional writing style for users writing developer documentation on +our Open Source framework. Each topic is precise and well explained to help you write +documentation users can easily understand and implement.

Who is This Guide For?

This guide is for anyone who loves to build with Platformatic or wants to contribute +to our documentation. You do not need to be an expert in writing technical +documentation. This guide is here to help you.

Visit CONTRIBUTING.md +file on GitHub to join our Open Source folks.

Before you Write

You should have a basic understanding of:

  • JavaScript
  • Node.js
  • Git
  • GitHub
  • Markdown
  • HTTP
  • NPM

Consider Your Audience

Before you start writing, think about your audience. In this case, your audience +should already know HTTP, JavaScript, NPM, and Node.js. It is necessary to keep +your readers in mind because they are the ones consuming your content. You want +to give as much useful information as possible. Consider the vital things they +need to know and how they can understand them. Use words and references that +readers can relate to easily. Ask for feedback from the community, it can help +you write better documentation that focuses on the user and what you want to +achieve.

Get Straight to the Point

Give your readers a clear and precise action to take. Start with what is most +important. This way, you can help them find what they need faster. Mostly, +readers tend to read the first content on a page, and many will not scroll +further.

Example

Less like this:

Colons are very important to register a parametric path. It lets +the framework know there is a new parameter created. You can place the colon +before the parameter name so the parametric path can be created.

More Like this:

To register a parametric path, put a colon before the parameter +name. Using a colon lets the framework know it is a parametric path and not a +static path.

Images and Video Should Enhance the Written Documentation

Images and video should only be added if they complement the written +documentation, for example to help the reader form a clearer mental model of a +concept or pattern.

Images can be directly embedded, but videos should be included by linking to an +external site, such as YouTube. You can add links by using +[Title](https://www.websitename.com) in the Markdown.

Avoid Plagiarism

Make sure you avoid copying other people's work. Keep it as original as +possible. You can learn from what they have done and reference where it is from +if you used a particular quote from their work.

Word Choice

There are a few things you need to use and avoid when writing your documentation +to improve readability for readers and make documentation neat, direct, and +clean.

When to use the Second Person "you" as the Pronoun

When writing articles or guides, your content should communicate directly to +readers in the second person ("you") addressed form. It is easier to give them +direct instruction on what to do on a particular topic. To see an example, visit +the Quick Start Guide.

Example

Less like this:

We can use the following plugins.

More like this:

You can use the following plugins.

According to Wikipedia, You is usually a second person pronoun. +Also, used to refer to an indeterminate person, as a more common alternative +to a very formal indefinite pronoun.

To recap, use "you" when writing articles or guides.

When to Avoid the Second Person "you" as the Pronoun

One of the main rules of formal writing such as reference documentation, or API +documentation, is to avoid the second person ("you") or directly addressing the +reader.

Example

Less like this:

You can use the following recommendation as an example.

More like this:

As an example, the following recommendations should be +referenced.

To view a live example, refer to the Decorators +reference document.

To recap, avoid "you" in reference documentation or API documentation.

Avoid Using Contractions

Contractions are the shortened version of written and spoken forms of a word, +i.e. using "don't" instead of "do not". Avoid contractions to provide a more +formal tone.

Avoid Using Condescending Terms

Condescending terms are words that include:

  • Just
  • Easy
  • Simply
  • Basically
  • Obviously

The reader may not find it easy to use Platformatic; avoid +words that make it sound simple, easy, offensive, or insensitive. Not everyone +who reads the documentation has the same level of understanding.

Starting With a Verb

Mostly start your description with a verb, which makes it simple and precise for +the reader to follow. Prefer using present tense because it is easier to read +and understand than the past or future tense.

Example

Less like this:

There is a need for Node.js to be installed before you can be +able to use Platformatic.

More like this:

Install Node.js to make use of Platformatic.

Grammatical Moods

Grammatical moods are a great way to express your writing. Avoid sounding too +bossy while making a direct statement. Know when to switch between indicative, +imperative, and subjunctive moods.

Indicative - Use when making a factual statement or question.

Example

Since there is no testing framework available, "Platformatic recommends ways +to write tests".

Imperative - Use when giving instructions, actions, commands, or when you +write your headings.

Example

Install dependencies before starting development.

Subjunctive - Use when making suggestions, hypotheses, or non-factual +statements.

Example

Reading the documentation on our website is recommended to get +comprehensive knowledge of the framework.

Use Active Voice Instead of Passive

Using active voice is a more compact and direct way of conveying your +documentation.

Example

Passive:

The node dependencies and packages are installed by npm.

Active:

npm installs packages and node dependencies.

Writing Style

Documentation Titles

When creating a new guide, API, or reference in the /docs/ directory, use short titles that best describe the topic of your documentation. Name your files in kebab-case and avoid raw or camelCase names. To learn more about kebab-case you can visit this medium article on Case Styles.

Examples:

hook-and-plugins.md

adding-test-plugins.md

removing-requests.md

Hyperlinks should have a clear title of what it references. Here is how your +hyperlink should look:

<!-- More like this -->

// Add clear & brief description
[Fastify Plugins] (https://www.fastify.io/docs/latest/Plugins/)

<!--Less like this -->

// incomplete description
[Fastify] (https://www.fastify.io/docs/latest/Plugins/)

// Adding title in link brackets
[](https://www.fastify.io/docs/latest/Plugins/ "fastify plugin")

// Empty title
[](https://www.fastify.io/docs/latest/Plugins/)

// Adding links localhost URLs instead of using code strings (``)
[http://localhost:3000/](http://localhost:3000/)

Include in your documentation as many essential references as possible, but +avoid having numerous links when writing to avoid distractions.

+ + + + \ No newline at end of file diff --git a/docs/0.41.3/contributing/index.html b/docs/0.41.3/contributing/index.html new file mode 100644 index 00000000000..bec19939b8a --- /dev/null +++ b/docs/0.41.3/contributing/index.html @@ -0,0 +1,18 @@ + + + + + +Contributing | Platformatic Open Source Software + + + + + +
+
+ + + + \ No newline at end of file diff --git a/docs/0.41.3/getting-started/architecture/index.html b/docs/0.41.3/getting-started/architecture/index.html new file mode 100644 index 00000000000..240e58bd4ab --- /dev/null +++ b/docs/0.41.3/getting-started/architecture/index.html @@ -0,0 +1,25 @@ + + + + + +Architecture | Platformatic Open Source Software + + + + + +
+
Version: 0.41.3

Architecture

Platformatic is a collection of Open Source tools designed to eliminate friction +in backend development. The first of those tools is Platformatic DB, which is developed +as @platformatic/db.

Platformatic DB

Platformatic DB can expose a SQL database by dynamically mapping it to REST/OpenAPI +and GraphQL endpoints. It supports a limited subset of the SQL query language, but +also allows developers to add their own custom routes and resolvers.

Platformatic DB Architecture

Platformatic DB is composed of a few key libraries:

  1. @platformatic/sql-mapper - follows the Data Mapper pattern to build an API on top of a SQL database. +Internally it uses the @database project.
  2. @platformatic/sql-openapi - uses sql-mapper to create a series of REST routes and matching OpenAPI definitions. +Internally it uses @fastify/swagger.
  3. @platformatic/sql-graphql - uses sql-mapper to create a GraphQL endpoint and schema. sql-graphql also support Federation. +Internally it uses mercurius.

Platformatic DB allows you to load a Fastify plugin during server startup that contains your own application-specific code. +The plugin can add more routes or resolvers — these will automatically be shown in the OpenAPI and GraphQL schemas.

SQL database migrations are also supported. They're implemented internally with the postgrator library.

+ + + + \ No newline at end of file diff --git a/docs/0.41.3/getting-started/movie-quotes-app-tutorial/index.html b/docs/0.41.3/getting-started/movie-quotes-app-tutorial/index.html new file mode 100644 index 00000000000..2649987fae2 --- /dev/null +++ b/docs/0.41.3/getting-started/movie-quotes-app-tutorial/index.html @@ -0,0 +1,129 @@ + + + + + +Movie Quotes App Tutorial | Platformatic Open Source Software + + + + + +
+
Version: 0.41.3

Movie Quotes App Tutorial

This tutorial will help you learn how to build a full stack application on top +of Platformatic DB. We're going to build an application that allows us to +save our favourite movie quotes. We'll also be building in custom API functionality +that allows for some neat user interaction on our frontend.

You can find the complete code for the application that we're going to build +on GitHub.

note

We'll be building the frontend of our application with the Astro +framework, but the GraphQL API integration steps that we're going to cover can +be applied with most frontend frameworks.

What we're going to cover

In this tutorial we'll learn how to:

  • Create a Platformatic API
  • Apply database migrations
  • Create relationships between our API entities
  • Populate our database tables
  • Build a frontend application that integrates with our GraphQL API
  • Extend our API with custom functionality
  • Enable CORS on our Platformatic API

Prerequisites

To follow along with this tutorial you'll need to have these things installed:

You'll also need to have some experience with JavaScript, and be comfortable with +running commands in a terminal.

Build the backend

Create a Platformatic API

First, let's create our project directory:

mkdir -p tutorial-movie-quotes-app/apps/movie-quotes-api/

cd tutorial-movie-quotes-app/apps/movie-quotes-api/

Run this command in your terminal to start the Platformatic creator wizard:

npm create platformatic@latest

This interactive command-line tool will ask you some questions about how you'd +like to set up your new Platformatic project. For this guide, select these options:

- Which kind of project do you want to create?  => DB
- Where would you like to create your project? => quick-start
- Do you want to create default migrations? => Yes
- Do you want to create a plugin? => Yes
- Do you want to use TypeScript? => No
- Do you want to install dependencies? => Yes (this can take a while)
- Do you want to apply the migrations? => Yes
- Do you want to generate types? => Yes
- Do you want to create the github action to deploy this application to Platformatic Cloud dynamic workspace? => No
- Do you want to create the github action to deploy this application to Platformatic Cloud static workspace? => No

Once the wizard is complete, you'll have a Platformatic app project in the +folder quick-start, with example migration files and a plugin script.

info

Make sure you run the npm/yarn/pnpm command install command manually if you +don't ask the wizard to do it for you.

Define the database schema

Let's create a new directory to store our migration files:

mkdir migrations

Then we'll create a migration file named 001.do.sql in the migrations +directory:

CREATE TABLE quotes (
id INTEGER PRIMARY KEY,
quote TEXT NOT NULL,
said_by VARCHAR(255) NOT NULL,
created_at DATETIME DEFAULT CURRENT_TIMESTAMP
);

Now let's setup migrations in our Platformatic configuration +file, platformatic.db.json:

{
"$schema": "https://platformatic.dev/schemas/v0.23.2/db",
"server": {
"hostname": "{PLT_SERVER_HOSTNAME}",
"port": "{PORT}",
"logger": {
"level": "{PLT_SERVER_LOGGER_LEVEL}"
}
},
"db": {
"connectionString": "{DATABASE_URL}",
"graphql": true,
"openapi": true
},
"plugins": {
"paths": [
"plugin.js"
]
},
"types": {
"autogenerate": true
},
"migrations": {
"dir": "migrations",
"autoApply": true
}
}
info

Take a look at the Configuration reference +to see all the supported configuration settings.

Now we can start the Platformatic DB server:

npm run start

Our Platformatic DB server should start, and we'll see messages like these:

[11:26:48.772] INFO (15235): running 001.do.sql
[11:26:48.864] INFO (15235): server listening
url: "http://127.0.0.1:3042"

Let's open a new terminal and make a request to our server's REST API that +creates a new quote:

curl --request POST --header "Content-Type: application/json" \
-d "{ \"quote\": \"Toto, I've got a feeling we're not in Kansas anymore.\", \"saidBy\": \"Dorothy Gale\" }" \
http://localhost:3042/quotes

We should receive a response like this from the API:

{"id":1,"quote":"Toto, I've got a feeling we're not in Kansas anymore.","saidBy":"Dorothy Gale","createdAt":"1684167422600"}

Create an entity relationship

Now let's create a migration file named 002.do.sql in the migrations +directory:

CREATE TABLE movies (
id INTEGER PRIMARY KEY,
name TEXT NOT NULL UNIQUE
);

ALTER TABLE quotes ADD COLUMN movie_id INTEGER REFERENCES movies(id);

This SQL will create a new movies database table and also add a movie_id +column to the quotes table. This will allow us to store movie data in the +movies table and then reference them by ID in our quotes table.

Let's stop the Platformatic DB server with Ctrl + C, and then start it again:

npm run start

The new migration should be automatically applied and we'll see the log message +running 002.do.sql.

Our Platformatic DB server also provides a GraphQL API. Let's open up the GraphiQL +application in our web browser:

http://localhost:3042/graphiql

Now let's run this query with GraphiQL to add the movie for the quote that we +added earlier:

mutation {
saveMovie(input: { name: "The Wizard of Oz" }) {
id
}
}

We should receive a response like this from the API:

{
"data": {
"saveMovie": {
"id": "1"
}
}
}

Now we can update our quote to reference the movie:

mutation {
saveQuote(input: { id: 1, movieId: 1 }) {
id
quote
saidBy
createdAt
movie {
id
name
}
}
}

We should receive a response like this from the API:

{
"data": {
"saveQuote": {
"id": "1",
"quote": "Toto, I've got a feeling we're not in Kansas anymore.",
"saidBy": "Dorothy Gale",
"movie": {
"id": "1",
"name": "The Wizard of Oz"
}
}
}
}

Our Platformatic DB server has automatically identified the relationship +between our quotes and movies database tables. This allows us to make +GraphQL queries that retrieve quotes and their associated movies at the same +time. For example, to retrieve all quotes from our database we can run:

query {
quotes {
id
quote
saidBy
createdAt
movie {
id
name
}
}
}

To view the GraphQL schema that's generated for our API by Platformatic DB, +we can run this command in our terminal:

npx platformatic db schema graphql

The GraphQL schema shows all of the queries and mutations that we can run +against our GraphQL API, as well as the types of data that it expects as input.

Populate the database

Our movie quotes database is looking a little empty! We're going to create a +"seed" script to populate it with some data.

Let's create a new file named seed.js and copy and paste in this code:

'use strict'

const quotes = [
{
quote: "Toto, I've got a feeling we're not in Kansas anymore.",
saidBy: 'Dorothy Gale',
movie: 'The Wizard of Oz'
},
{
quote: "You're gonna need a bigger boat.",
saidBy: 'Martin Brody',
movie: 'Jaws'
},
{
quote: 'May the Force be with you.',
saidBy: 'Han Solo',
movie: 'Star Wars'
},
{
quote: 'I have always depended on the kindness of strangers.',
saidBy: 'Blanche DuBois',
movie: 'A Streetcar Named Desire'
}
]

module.exports = async function ({ entities, db, sql }) {
for (const values of quotes) {
const movie = await entities.movie.save({ input: { name: values.movie } })

console.log('Created movie:', movie)

const quote = {
quote: values.quote,
saidBy: values.saidBy,
movieId: movie.id
}

await entities.quote.save({ input: quote })

console.log('Created quote:', quote)
}
}
info

Take a look at the Seed a Database guide to learn more +about how database seeding works with Platformatic DB.

Let's stop our Platformatic DB server running and remove our SQLite database:

rm db.sqlite

Now let's create a fresh SQLite database by running our migrations:

npx platformatic db migrations apply

And then let's populate the quotes and movies tables with data using our +seed script:

npx platformatic db seed seed.js

Our database is full of data, but we don't have anywhere to display it. It's +time to start building our frontend!

Build the frontend

We're now going to use Astro to build our frontend +application. If you've not used it before, you might find it helpful +to read this overview +on how Astro components are structured.

tip

Astro provide some extensions and tools to help improve your +Editor Setup when building an +Astro application.

Create an Astro application

In the root tutorial-movie-quotes-app directory of our project, let's create a new directory for our frontend application:

mkdir -p apps/movie-quotes-frontend/

cd apps/movie-quotes-frontend/

And then we'll create a new Astro project:

npm create astro@latest -- --template basics

It will ask you some questions about how you'd like to set up +your new Astro project. For this guide, select these options:

Where should we create your new project?

   .
◼ tmpl Using basics as project template
✔ Template copied

Install dependencies? (it's buggy, we'll do it afterwards)

   No
◼ No problem! Remember to install dependencies after setup.

Do you plan to write TypeScript?

   No
◼ No worries! TypeScript is supported in Astro by default, but you are free to continue writing JavaScript instead.

Initialize a new git repository?

   No
◼ Sounds good! You can always run git init manually.

Liftoff confirmed. Explore your project!
Run npm dev to start the dev server. CTRL+C to stop.
Add frameworks like react or tailwind using astro add.

Now we'll edit our Astro configuration file, astro.config.mjs and +copy and paste in this code:

import { defineConfig } from 'astro/config'

// https://astro.build/config
export default defineConfig({
output: 'server'
})

And we'll also edit our tsconfig.json file and add in this configuration:

{
"extends": "astro/tsconfigs/base",
"compilerOptions": {
"types": ["astro/client"]
}
}

Now we can start up the Astro development server with:

npm run dev

And then load up the frontend in our browser at http://localhost:3000

Now that everything is working, we'll remove all default *.astro files from the src/ directory, but we'll keep the directory structure. You can delete them now, or override them later.

Create a layout

In the src/layouts directory, let's create a new file named Layout.astro:

---
export interface Props {
title: string;
page?: string;
}
const { title, page } = Astro.props;
---

<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8" />
<meta name="viewport" content="width=device-width" />
<title>{title}</title>
</head>
<body>
<header>
<h1>🎬 Movie Quotes</h1>
</header>
<nav>
<a href="/">All quotes</a>
</nav>
<section>
<slot />
</section>
</body>
</html>

The code between the --- is known as the component script, and the +code after that is the component template. The component script will only run +on the server side when a web browser makes a request. The component template +is rendered server side and sent back as an HTML response to the web browser.

Now we'll update src/pages/index.astro to use this Layout component. +Let's replace the contents of src/pages/index.astro with this code:

---
import Layout from '../layouts/Layout.astro';
---

<Layout title="All quotes" page="listing">
<main>
<p>We'll list all the movie quotes here.</p>
</main>
</Layout>

Integrate the urql GraphQL client

We're now going to integrate the URQL +GraphQL client into our frontend application. This will allow us to run queries +and mutations against our Platformatic GraphQL API.

Let's first install @urql/core and +graphql as project dependencies:

npm install @urql/core graphql

Then let's create a new .env file and add this configuration:

PUBLIC_GRAPHQL_API_ENDPOINT=http://127.0.0.1:3042/graphql

Now we'll create a new directory:

mkdir src/lib

And then create a new file named src/lib/quotes-api.js. In that file we'll +create a new URQL client:

// src/lib/quotes-api.js

import { createClient, cacheExchange, fetchExchange } from '@urql/core';

const graphqlClient = createClient({
url: import.meta.env.PUBLIC_GRAPHQL_API_ENDPOINT,
requestPolicy: "network-only",
exchanges: [cacheExchange, fetchExchange]
});

We'll also add a thin wrapper around the client that does some basic error +handling for us:

// src/lib/quotes-api.js

async function graphqlClientWrapper(method, gqlQuery, queryVariables = {}) {
const queryResult = await graphqlClient[method](
gqlQuery,
queryVariables
).toPromise();

if (queryResult.error) {
console.error("GraphQL error:", queryResult.error);
}

return {
data: queryResult.data,
error: queryResult.error,
};
}

export const quotesApi = {
async query(gqlQuery, queryVariables = {}) {
return await graphqlClientWrapper("query", gqlQuery, queryVariables);
},
async mutation(gqlQuery, queryVariables = {}) {
return await graphqlClientWrapper("mutation", gqlQuery, queryVariables);
}
}

And lastly, we'll export gql from the @urql/core package, to make it +simpler for us to write GraphQL queries in our pages:

// src/lib/quotes-api.js

export { gql } from "@urql/core";

Stop the Astro dev server and then start it again so it picks up the .env +file:

npm run dev

Display all quotes

Let's display all the movie quotes in src/pages/index.astro.

First, we'll update the component script at the top and add in a query to +our GraphQL API for quotes:

---
import Layout from '../layouts/Layout.astro';
import { quotesApi, gql } from '../lib/quotes-api';

const { data } = await quotesApi.query(gql`
query {
quotes {
id
quote
saidBy
createdAt
movie {
id
name
}
}
}
`);

const quotes = data?.quotes || [];
---

Then we'll update the component template to display the quotes:

<Layout title="All quotes" page="listing">
<main>
{quotes.length > 0 ? quotes.map((quote) => (
<div>
<blockquote>
<p>{quote.quote}</p>
</blockquote>
<p>
{quote.saidBy}, {quote.movie?.name}
</p>
<div>
<span>Added {new Date(Number(quote.createdAt)).toUTCString()}</span>
</div>
</div>
)) : (
<p>No movie quotes have been added.</p>
)}
</main>
</Layout>

And just like that, we have all the movie quotes displaying on the page!

Integrate Tailwind for styling

Automatically add the @astrojs/tailwind integration:

npx astro add tailwind --yes

Add the Tailwind CSS Typography +and Forms plugins:

npm install --save-dev @tailwindcss/typography @tailwindcss/forms

Import the plugins in our Tailwind configuration file:

// tailwind.config.cjs

/** @type {import('tailwindcss').Config} */
module.exports = {
content: ['./src/**/*.{astro,html,js,jsx,md,mdx,svelte,ts,tsx,vue}'],
theme: {
extend: {}
},
plugins: [
require('@tailwindcss/forms'),
require('@tailwindcss/typography')
]
}

Stop the Astro dev server and then start it again so it picks up all the +configuration changes:

npm run dev

Style the listing page

To style our listing page, let's add CSS classes to the component template in +src/layouts/Layout.astro:

---
export interface Props {
title: string;
page?: string;
}

const { title, page } = Astro.props;

const navActiveClasses = "font-bold bg-yellow-400 no-underline";
---

<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8" />
<meta name="viewport" content="width=device-width" />
<title>{title}</title>
</head>
<body class="py-8">
<header class="prose mx-auto mb-6">
<h1>🎬 Movie Quotes</h1>
</header>
<nav class="prose mx-auto mb-6 border-y border-gray-200 flex">
<a href="/" class={`p-3 ${page === "listing" && navActiveClasses}`}>All quotes</a>
</nav>
<section class="prose mx-auto">
<slot />
</section>
</body>
</html>

Then let's add CSS classes to the component template in src/pages/index.astro:

<Layout title="All quotes">
<main>
{quotes.length > 0 ? quotes.map((quote) => (
<div class="border-b mb-6">
<blockquote class="text-2xl mb-0">
<p class="mb-4">{quote.quote}</p>
</blockquote>
<p class="text-xl mt-0 mb-8 text-gray-400">
{quote.saidBy}, {quote.movie?.name}
</p>
<div class="flex flex-col mb-6 text-gray-400">
<span class="text-gray-400 italic">Added {new Date(Number(quote.createdAt)).toUTCString()}</span>
</div>
</div>
)) : (
<p>No movie quotes have been added.</p>
)}
</main>
</Layout>

Our listing page is now looking much more user friendly!

Create an add quote page

We're going to create a form component that we can use for adding and editing +quotes.

First let's create a new component file, src/components/QuoteForm.astro:

---
export interface QuoteFormData {
id?: number;
quote?: string;
saidBy?: string;
movie?: string;
}

export interface Props {
action: string;
values?: QuoteFormData;
saveError?: boolean;
loadError?: boolean;
submitLabel: string;
}

const { action, values = {}, saveError, loadError, submitLabel } = Astro.props;
---

{saveError && <p class="text-lg bg-red-200 p-4">There was an error saving the quote. Please try again.</p>}
{loadError && <p class="text-lg bg-red-200 p-4">There was an error loading the quote. Please try again.</p>}

<form method="post" action={action} class="grid grid-cols-1 gap-6">
<label for="quote" class="block">
<span>Quote</span>
<textarea id="quote" name="quote" required="required" class="mt-1 w-full">{values.quote}</textarea>
</label>
<label for="said-by" class="block">
<span>Said by</span>
<input type="text" id="said-by" name="saidBy" required="required" value={values.saidBy} class="mt-1 w-full">
</label>
<label for="movie" class="block">
<span>Movie</span>
<input type="text" id="movie" name="movie" required="required" autocomplete="off" value={values.movie} class="form-input mt-1 w-full">
</label>
<input type="submit" value={submitLabel} disabled={loadError && "disabled"} class="bg-yellow-400 hover:bg-yellow-500 text-gray-900 round p-3" />
</form>

Create a new page file, src/pages/add.astro:

---
import Layout from '../layouts/Layout.astro';
import QuoteForm from '../components/QuoteForm.astro';
import type { QuoteFormData } from '../components/QuoteForm.astro';

let formData: QuoteFormData = {};
let saveError = false;
---

<Layout title="Add a movie quote" page="add">
<main>
<h2>Add a quote</h2>
<QuoteForm action="/add" values={formData} saveError={saveError} submitLabel="Add quote" />
</main>
</Layout>

And now let's add a link to this page in the layout navigation in src/layouts/Layout.astro:

<nav class="prose mx-auto mb-6 border-y border-gray-200 flex">
<a href="/" class={`p-3 ${page === "listing" && navActiveClasses}`}>All quotes</a>
<a href="/add" class={`p-3 ${page === "add" && navActiveClasses}`}>Add a quote</a>
</nav>

Send form data to the API

When a user submits the add quote form we want to send the form data to our API +so it can then save it to our database. Let's wire that up now.

First we're going to create a new file, src/lib/request-utils.js:

export function isPostRequest (request) {
return request.method === 'POST'
}

export async function getFormData (request) {
const formData = await request.formData()

return Object.fromEntries(formData.entries())
}

Then let's update the component script in src/pages/add.astro to use +these new request utility functions:

---
import Layout from '../layouts/Layout.astro';
import QuoteForm from '../components/QuoteForm.astro';
import type { QuoteFormData } from '../components/QuoteForm.astro';

import { isPostRequest, getFormData } from '../lib/request-utils';

let formData: QuoteFormData = {};
let saveError = false;

if (isPostRequest(Astro.request)) {
formData = await getFormData(Astro.request);
}
---

When we create a new quote entity record via our API, we need to include a +movieId field that references a movie entity record. This means that when a +user submits the add quote form we need to:

  • Check if a movie entity record already exists with that movie name
  • Return the movie id if it does exist
  • If it doesn't exist, create a new movie entity record and return the movie ID

Let's update the import statement at the top of src/lib/quotes-api.js

-import { createClient } from '@urql/core'
+import { createClient, gql } from '@urql/core'

And then add a new method that will return a movie ID for us:

async function getMovieId (movieName) {
movieName = movieName.trim()

let movieId = null

// Check if a movie already exists with the provided name.
const queryMoviesResult = await quotesApi.query(
gql`
query ($movieName: String!) {
movies(where: { name: { eq: $movieName } }) {
id
}
}
`,
{ movieName }
)

if (queryMoviesResult.error) {
return null
}

const movieExists = queryMoviesResult.data?.movies.length === 1
if (movieExists) {
movieId = queryMoviesResult.data.movies[0].id
} else {
// Create a new movie entity record.
const saveMovieResult = await quotesApi.mutation(
gql`
mutation ($movieName: String!) {
saveMovie(input: { name: $movieName }) {
id
}
}
`,
{ movieName }
)

if (saveMovieResult.error) {
return null
}

movieId = saveMovieResult.data?.saveMovie.id
}

return movieId
}

And let's export it too:

export const quotesApi = {
async query (gqlQuery, queryVariables = {}) {
return await graphqlClientWrapper('query', gqlQuery, queryVariables)
},
async mutation (gqlQuery, queryVariables = {}) {
return await graphqlClientWrapper('mutation', gqlQuery, queryVariables)
},
getMovieId
}

Now we can wire up the last parts in the src/pages/add.astro component +script:

---
import Layout from '../layouts/Layout.astro';
import QuoteForm from '../components/QuoteForm.astro';
import type { QuoteFormData } from '../components/QuoteForm.astro';

import { quotesApi, gql } from '../lib/quotes-api';
import { isPostRequest, getFormData } from '../lib/request-utils';

let formData: QuoteFormData = {};
let saveError = false;

if (isPostRequest(Astro.request)) {
formData = await getFormData(Astro.request);

const movieId = await quotesApi.getMovieId(formData.movie);

if (movieId) {
const quote = {
quote: formData.quote,
saidBy: formData.saidBy,
movieId,
};

const { error } = await quotesApi.mutation(gql`
mutation($quote: QuoteInput!) {
saveQuote(input: $quote) {
id
}
}
`, { quote });

if (!error) {
return Astro.redirect('/');
} else {
saveError = true;
}
} else {
saveError = true;
}
}

Add autosuggest for movies

We can create a better experience for our users by autosuggesting the movie name +when they're adding a new quote.

Let's open up src/components/QuoteForm.astro and import our API helper methods +in the component script:

import { quotesApi, gql } from '../lib/quotes-api.js';

Then let's add in a query to our GraphQL API for all movies:

const { data } = await quotesApi.query(gql`
query {
movies {
name
}
}
`);

const movies = data?.movies || [];

Now let's update the Movie field in the component template to use the array of movies that we've retrieved from the API:

<label for="movie" class="block">
<span>Movie</span>
<input list="movies" id="movie" name="movie" required="required" autocomplete="off" value={values.movie} class="form-input mt-1 w-full">
<datalist id="movies">
{movies.map(({ name }) => (
<option>{name}</option>
))}
</datalist>
</label>

Create an edit quote page

Let's create a new directory, src/pages/edit/:

mkdir src/pages/edit/

And inside of it, let's create a new page, [id].astro:

---
import Layout from '../../layouts/Layout.astro';
import QuoteForm, { QuoteFormData } from '../../components/QuoteForm.astro';

const id = Number(Astro.params.id);

let formValues: QuoteFormData = {};
let loadError = false;
let saveError = false;
---

<Layout title="Edit movie quote">
<main>
<h2>Edit quote</h2>
<QuoteForm action={`/edit/${id}`} values={formValues} saveError={saveError} loadError={loadError} submitLabel="Update quote" />
</main>
</Layout>

You'll see that we're using the same QuoteForm component that our add quote +page uses. Now we're going to wire up our edit page so that it can load an +existing quote from our API and save changes back to the API when the form is +submitted.

In the [id].astro component script, let's add some code to take care of these tasks:

---
import Layout from '../../layouts/Layout.astro';
import QuoteForm, { QuoteFormData } from '../../components/QuoteForm.astro';

import { quotesApi, gql } from '../../lib/quotes-api';
import { isPostRequest, getFormData } from '../../lib/request-utils';

const id = Number(Astro.params.id);

let formValues: QuoteFormData = {};
let loadError = false;
let saveError = false;

if (isPostRequest(Astro.request)) {
const formData = await getFormData(Astro.request);
formValues = formData;

const movieId = await quotesApi.getMovieId(formData.movie);

if (movieId) {
const quote = {
id,
quote: formData.quote,
saidBy: formData.saidBy,
movieId,
};

const { error } = await quotesApi.mutation(gql`
mutation($quote: QuoteInput!) {
saveQuote(input: $quote) {
id
}
}
`, { quote });

if (!error) {
return Astro.redirect('/');
} else {
saveError = true;
}
} else {
saveError = true;
}
} else {
const { data } = await quotesApi.query(gql`
query($id: ID!) {
getQuoteById(id: $id) {
id
quote
saidBy
movie {
id
name
}
}
}
`, { id });

if (data?.getQuoteById) {
formValues = {
...data.getQuoteById,
movie: data.getQuoteById.movie.name
};
} else {
loadError = true;
}
}
---

Load up http://localhost:3000/edit/1 in your +browser to test out the edit quote page.

Now we're going to add edit links to the quotes listing page. Let's start by +creating a new component src/components/QuoteActionEdit.astro:

---
export interface Props {
id: number;
}

const { id } = Astro.props;
---
<a href={`/edit/${id}`} class="flex items-center mr-5 text-gray-400 hover:text-yellow-600 underline decoration-yellow-600 decoration-2 underline-offset-4">
<svg class="w-6 h-6 mr-1" xmlns="http://www.w3.org/2000/svg" viewBox="0 0 24 24" fill="currentColor" class="w-6 h-6">
<path d="M21.731 2.269a2.625 2.625 0 00-3.712 0l-1.157 1.157 3.712 3.712 1.157-1.157a2.625 2.625 0 000-3.712zM19.513 8.199l-3.712-3.712-8.4 8.4a5.25 5.25 0 00-1.32 2.214l-.8 2.685a.75.75 0 00.933.933l2.685-.8a5.25 5.25 0 002.214-1.32l8.4-8.4z" />
<path d="M5.25 5.25a3 3 0 00-3 3v10.5a3 3 0 003 3h10.5a3 3 0 003-3V13.5a.75.75 0 00-1.5 0v5.25a1.5 1.5 0 01-1.5 1.5H5.25a1.5 1.5 0 01-1.5-1.5V8.25a1.5 1.5 0 011.5-1.5h5.25a.75.75 0 000-1.5H5.25z" />
</svg>
<span class="hover:underline hover:decoration-yellow-600">Edit</span>
</a>

Then let's import this component and use it in our listing page, +src/pages/index.astro:

---
import Layout from '../layouts/Layout.astro';
import QuoteActionEdit from '../components/QuoteActionEdit.astro';
import { quotesApi, gql } from '../lib/quotes-api';

// ...
---

<Layout title="All quotes" page="listing">
<main>
{quotes.length > 0 ? quotes.map((quote) => (
<div class="border-b mb-6">
...
<div class="flex flex-col mb-6 text-gray-400">
<span class="flex items-center">
<QuoteActionEdit id={quote.id} />
</span>
<span class="mt-4 text-gray-400 italic">Added {new Date(Number(quote.createdAt)).toUTCString()}</span>
</div>
</div>
)) : (
<p>No movie quotes have been added.</p>
)}
</main>
</Layout>

Add delete quote functionality

Our Movie Quotes app can create, retrieve and update quotes. Now we're going +to implement the D in CRUD — delete!

First let's create a new component, src/components/QuoteActionDelete.astro:

---
export interface Props {
id: number;
}

const { id } = Astro.props;
---
<form method="POST" action={`/delete/${id}`} class="form-delete-quote m-0">
<button type="submit" class="flex items-center text-gray-400 hover:text-red-700 underline decoration-red-700 decoration-2 underline-offset-4">
<svg class="w-6 h-6 mr-1" xmlns="http://www.w3.org/2000/svg" viewBox="0 0 24 24" fill="currentColor" class="w-6 h-6">
<path fill-rule="evenodd" d="M12 2.25c-5.385 0-9.75 4.365-9.75 9.75s4.365 9.75 9.75 9.75 9.75-4.365 9.75-9.75S17.385 2.25 12 2.25zm-1.72 6.97a.75.75 0 10-1.06 1.06L10.94 12l-1.72 1.72a.75.75 0 101.06 1.06L12 13.06l1.72 1.72a.75.75 0 101.06-1.06L13.06 12l1.72-1.72a.75.75 0 10-1.06-1.06L12 10.94l-1.72-1.72z" clip-rule="evenodd" />
</svg>
<span>Delete</span>
</button>
</form>

And then we'll drop it into our listing page, src/pages/index.astro:

---
import Layout from '../layouts/Layout.astro';
import QuoteActionEdit from '../components/QuoteActionEdit.astro';
import QuoteActionDelete from '../components/QuoteActionDelete.astro';
import { quotesApi, gql } from '../lib/quotes-api';

// ...
---

<Layout title="All quotes" page="listing">
<main>
{quotes.length > 0 ? quotes.map((quote) => (
<div class="border-b mb-6">
...
<div class="flex flex-col mb-6 text-gray-400">
<span class="flex items-center">
<QuoteActionEdit id={quote.id} />
<QuoteActionDelete id={quote.id} />
</span>
<span class="mt-4 text-gray-400 italic">Added {new Date(Number(quote.createdAt)).toUTCString()}</span>
</div>
</div>
...

At the moment when a delete form is submitted from our listing page, we get +an Astro 404 page. Let's fix this by creating a new directory, src/pages/delete/:

mkdir src/pages/delete/

And inside of it, let's create a new page, [id].astro:

---
import Layout from '../../layouts/Layout.astro';

import { quotesApi, gql } from '../../lib/quotes-api';
import { isPostRequest } from '../../lib/request-utils';

if (isPostRequest(Astro.request)) {
const id = Number(Astro.params.id);

const { error } = await quotesApi.mutation(gql`
mutation($id: ID!) {
deleteQuotes(where: { id: { eq: $id }}) {
id
}
}
`, { id });

if (!error) {
return Astro.redirect('/');
}
}
---
<Layout title="Delete movie quote">
<main>
<h2>Delete quote</h2>
<p class="text-lg bg-red-200 p-4">There was an error deleting the quote. Please try again.</p>
</main>
</Layout>

Now if we click on a delete quote button on our listings page, it should call our +GraphQL API to delete the quote. To make this a little more user friendly, let's +add in a confirmation dialog so that users don't delete a quote by accident.

Let's create a new directory, src/scripts/:

mkdir src/scripts/

And inside of that directory let's create a new file, quote-actions.js:

// src/scripts/quote-actions.js

export function confirmDeleteQuote (form) {
if (confirm('Are you sure want to delete this quote?')) {
form.submit()
}
}

Then we can pull it in as client side JavaScript on our listing page, +src/pages/index.astro:

<Layout>
...
</Layout>

<script>
import { confirmDeleteQuote } from '../scripts/quote-actions.js'

addEventListener('DOMContentLoaded', () => {
document.querySelectorAll('.form-delete-quote').forEach((deleteForm) => {
deleteForm.addEventListener('submit', (event) => {
event.preventDefault()
confirmDeleteQuote(event.currentTarget)
})
})
})
</script>

Build a "like" quote feature

We've built all the basic CRUD (Create, Retrieve, Update & Delete) features +into our application. Now let's build a feature so that users can interact +and "like" their favourite movie quotes.

To build this feature we're going to add custom functionality to our API +and then add a new component, along with some client side JavaScript, to +our frontend.

Create an API migration

We're now going to work on the code for API, under the apps/movie-quotes-api +directory.

First let's create a migration that adds a likes column to our quotes +database table. We'll create a new migration file, migrations/003.do.sql:

ALTER TABLE quotes ADD COLUMN likes INTEGER default 0;

This migration will automatically be applied when we next start our Platformatic +API.

Create an API plugin

To add custom functionality to our Platformatic API, we need to create a +Fastify plugin and +update our API configuration to use it.

Let's create a new file, plugin.js, and inside it we'll add the skeleton +structure for our plugin:

// plugin.js

'use strict'

module.exports = async function plugin (app) {
app.log.info('plugin loaded')
}

Now let's register our plugin in our API configuration file, platformatic.db.json:

{
...
"migrations": {
"dir": "./migrations"
},
"plugins": {
"paths": ["./plugin.js"]
}
}

And then we'll start up our Platformatic API:

npm run dev

We should see log messages that tell us that our new migration has been +applied and our plugin has been loaded:

[10:09:20.052] INFO (146270): running 003.do.sql
[10:09:20.129] INFO (146270): plugin loaded
[10:09:20.209] INFO (146270): server listening
url: "http://127.0.0.1:3042"

Now it's time to start adding some custom functionality inside our plugin.

Add a REST API route

We're going to add a REST route to our API that increments the count of +likes for a specific quote: /quotes/:id/like

First let's add fluent-json-schema as a dependency for our API:

npm install fluent-json-schema

We'll use fluent-json-schema to help us generate a JSON Schema. We can then +use this schema to validate the request path parameters for our route (id).

tip

You can use fastify-type-provider-typebox or typebox if you want to convert your JSON Schema into a Typescript type. See this GitHub thread to have a better overview about it. Look at the example below to have a better overview.

Here you can see in practice how to leverage typebox combined with fastify-type-provider-typebox:

import { FastifyInstance } from "fastify";
import { Static, Type } from "@sinclair/typebox";
import { TypeBoxTypeProvider } from "@fastify/type-provider-typebox";

/**
* Creation of the JSON schema needed to validate the params passed to the route
*/
const schemaParams = Type.Object({
num1: Type.Number(),
num2: Type.Number(),
});

/**
* We convert the JSON schema to the TypeScript type, in this case:
* {
num1: number;
num2: number;
}
*/
type Params = Static<typeof schemaParams>;

/**
* Here we can pass the type previously created to our synchronous unit function
*/
const multiplication = ({ num1, num2 }: Params) => num1 * num2;

export default async function (app: FastifyInstance) {
app.withTypeProvider<TypeBoxTypeProvider>().get(
"/multiplication/:num1/:num2",
{ schema: { params: schemaParams } },
/**
* Since we leverage `withTypeProvider<TypeBoxTypeProvider>()`,
* we no longer need to explicitly define the `params`.
* They will be automatically inferred as:
* {
num1: number;
num2: number;
}
*/
({ params }) => multiplication(params)
);
}

Now let's add our REST API route in plugin.js:

'use strict'

const S = require('fluent-json-schema')

module.exports = async function plugin (app) {
app.log.info('plugin loaded')

// This JSON Schema will validate the request path parameters.
// It reuses part of the schema that Platformatic DB has
// automatically generated for our Quote entity.
const schema = {
params: S.object().prop('id', app.getSchema('Quote').properties.id)
}

app.post('/quotes/:id/like', { schema }, async function (request, response) {
return {}
})
}

We can now make a POST request to our new API route:

curl --request POST http://localhost:3042/quotes/1/like
info

Learn more about how validation works in the +Fastify validation documentation.

Our API route is currently returning an empty object ({}). Let's wire things +up so that it increments the number of likes for the quote with the specified ID. +To do this we'll add a new function inside of our plugin:

module.exports = async function plugin (app) {
app.log.info('plugin loaded')

async function incrementQuoteLikes (id) {
const { db, sql } = app.platformatic

const result = await db.query(sql`
UPDATE quotes SET likes = likes + 1 WHERE id=${id} RETURNING likes
`)

return result[0]?.likes
}

// ...
}

And then we'll call that function in our route handler function:

app.post('/quotes/:id/like', { schema }, async function (request, response) {
return { likes: await incrementQuoteLikes(request.params.id) }
})

Now when we make a POST request to our API route:

curl --request POST http://localhost:3042/quotes/1/like

We should see that the likes value for the quote is incremented every time +we make a request to the route.

{"likes":1}

Add a GraphQL API mutation

We can add a likeQuote mutation to our GraphQL API by reusing the +incrementQuoteLikes function that we just created.

Let's add this code at the end of our plugin, inside plugin.js:

module.exports = async function plugin (app) {
// ...

app.graphql.extendSchema(`
extend type Mutation {
likeQuote(id: ID!): Int
}
`)

app.graphql.defineResolvers({
Mutation: {
likeQuote: async (_, { id }) => await incrementQuoteLikes(id)
}
})
}

The code we've just added extends our API's GraphQL schema and defines +a corresponding resolver for the likeQuote mutation.

We can now load up GraphiQL in our web browser and try out our new likeQuote +mutation with this GraphQL query:

mutation {
likeQuote(id: 1)
}
info

Learn more about how to extend the GraphQL schema and define resolvers in the +Mercurius API documentation.

Enable CORS on the API

When we build "like" functionality into our frontend, we'll be making a client +side HTTP request to our GraphQL API. Our backend API and our frontend are running +on different origins, so we need to configure our API to allow requests from +the frontend. This is known as Cross-Origin Resource Sharing (CORS).

To enable CORS on our API, let's open up our API's .env file and add in +a new setting:

PLT_SERVER_CORS_ORIGIN=http://localhost:3000

The value of PLT_SERVER_CORS_ORIGIN is our frontend application's origin.

Now we can add a cors configuration object in our API's configuration file, +platformatic.db.json:

{
"server": {
"logger": {
"level": "{PLT_SERVER_LOGGER_LEVEL}"
},
"hostname": "{PLT_SERVER_HOSTNAME}",
"port": "{PORT}",
"cors": {
"origin": "{PLT_SERVER_CORS_ORIGIN}"
}
},
...
}

The HTTP responses from all endpoints on our API will now include the header:

access-control-allow-origin: http://localhost:3000

This will allow JavaScript running on web pages under the http://localhost:3000 +origin to make requests to our API.

Add like quote functionality

Now that our API supports "liking" a quote, let's integrate it as a feature in +our frontend.

First we'll create a new component, src/components/QuoteActionLike.astro:

---
export interface Props {
id: number;
likes: number;
}

const { id, likes } = Astro.props;
---
<span data-quote-id={id} class="like-quote cursor-pointer mr-5 flex items-center">
<svg class="like-icon w-6 h-6 mr-2 text-red-600" xmlns="http://www.w3.org/2000/svg" fill="none" viewBox="0 0 24 24" stroke-width="1.5" stroke="currentColor" class="w-6 h-6">
<path stroke-linecap="round" stroke-linejoin="round" d="M21 8.25c0-2.485-2.099-4.5-4.688-4.5-1.935 0-3.597 1.126-4.312 2.733-.715-1.607-2.377-2.733-4.313-2.733C5.1 3.75 3 5.765 3 8.25c0 7.22 9 12 9 12s9-4.78 9-12z" />
</svg>
<span class="likes-count w-8">{likes}</span>
</span>

<style>
.like-quote:hover .like-icon,
.like-quote.liked .like-icon {
fill: currentColor;
}
</style>

And in our listing page, src/pages/index.astro, let's import our new +component and add it into the interface:

---
import Layout from '../layouts/Layout.astro';
import QuoteActionEdit from '../components/QuoteActionEdit.astro';
import QuoteActionDelete from '../components/QuoteActionDelete.astro';
import QuoteActionLike from '../components/QuoteActionLike.astro';
import { quotesApi, gql } from '../lib/quotes-api';

// ...
---

<Layout title="All quotes" page="listing">
<main>
{quotes.length > 0 ? quotes.map((quote) => (
<div class="border-b mb-6">
...
<div class="flex flex-col mb-6 text-gray-400">
<span class="flex items-center">
<QuoteActionLike id={quote.id} likes={quote.likes} />
<QuoteActionEdit id={quote.id} />
<QuoteActionDelete id={quote.id} />
</span>
<span class="mt-4 text-gray-400 italic">Added {new Date(Number(quote.createdAt)).toUTCString()}</span>
</div>
</div>
...

Then let's update the GraphQL query in this component's script to retrieve the +likes field for all quotes:

const { data } = await quotesApi.query(gql`
query {
quotes {
id
quote
saidBy
likes
createdAt
movie {
id
name
}
}
}
`);

Now we have the likes showing for each quote, let's wire things up so that +clicking on the like component for a quote will call our API and add a like.

Let's open up src/scripts/quote-actions.js and add a new function that +makes a request to our GraphQL API:

import { quotesApi, gql } from '../lib/quotes-api.js'

export function confirmDeleteQuote (form) {
if (confirm('Are you sure want to delete this quote?')) {
form.submit()
}
}

export async function likeQuote (likeQuote) {
likeQuote.classList.add('liked')
likeQuote.classList.remove('cursor-pointer')

const id = Number(likeQuote.dataset.quoteId)

const { data } = await quotesApi.mutation(gql`
mutation($id: ID!) {
likeQuote(id: $id)
}
`, { id })

if (data?.likeQuote) {
likeQuote.querySelector('.likes-count').innerText = data.likeQuote
}
}

And then let's attach the likeQuote function to the click event for each +like quote component on our listing page. We can do this by adding a little +extra code inside the <script> block in src/pages/index.astro:

<script>
import { confirmDeleteQuote, likeQuote } from '../scripts/quote-actions.js'

addEventListener('DOMContentLoaded', () => {
document.querySelectorAll('.form-delete-quote').forEach((deleteForm) => {
deleteForm.addEventListener('submit', (event) => {
event.preventDefault()
confirmDeleteQuote(event.currentTarget)
})
})

document.querySelectorAll('.like-quote').forEach((container) => {
container.addEventListener('click', (event) => likeQuote(event.currentTarget), { once: true })
})
})
</script>

Sort the listing by top quotes

Now that users can like their favourite quotes, as a final step, we'll allow +for sorting quotes on the listing page by the number of likes they have.

Let's update src/pages/index.astro to read a sort query string parameter and use it in the GraphQL query that we make to our API:

---
// ...

const allowedSortFields = ["createdAt", "likes"];
const searchParamSort = new URL(Astro.request.url).searchParams.get("sort");
const sort = allowedSortFields.includes(searchParamSort) ? searchParamSort : "createdAt";

const { data } = await quotesApi.query(gql`
query {
quotes(orderBy: {field: ${sort}, direction: DESC}) {
id
quote
saidBy
likes
createdAt
movie {
id
name
}
}
}
`);

const quotes = data?.quotes || [];
---
<Layout title="All quotes" page={`listing-${sort}`}>
...

Then let's replace the 'All quotes' link in the <nav> in src/layouts/Layout.astro +with two new links:

<nav class="prose mx-auto mb-6 border-y border-gray-200 flex">
<a href="/?sort=createdAt" class={`p-3 ${page === "listing-createdAt" && navActiveClasses}`}>Latest quotes</a>
<a href="/?sort=likes" class={`p-3 ${page === "listing-likes" && navActiveClasses}`}>Top quotes</a>
<a href="/add" class={`p-3 ${page === "add" && navActiveClasses}`}>Add a quote</a>
</nav>

With these few extra lines of code, our users can now sort quotes by when they +were created or by the number of likes that they have. Neat!

Wrapping up

And we're done — you now have the knowledge you need to build a full stack +application on top of Platformatic DB.

We can't wait to see what you'll build next!

+ + + + \ No newline at end of file diff --git a/docs/0.41.3/getting-started/new-api-project-instructions/index.html b/docs/0.41.3/getting-started/new-api-project-instructions/index.html new file mode 100644 index 00000000000..2206c1f62a0 --- /dev/null +++ b/docs/0.41.3/getting-started/new-api-project-instructions/index.html @@ -0,0 +1,20 @@ + + + + + +new-api-project-instructions | Platformatic Open Source Software + + + + + +
+
Version: 0.41.3

new-api-project-instructions

Run this command in your terminal to start the Platformatic creator wizard:

npm create platformatic@latest

This interactive command-line tool will ask you some questions about how you'd +like to set up your new Platformatic project. For this guide, select these options:

- Which kind of project do you want to create?  => DB
- Where would you like to create your project? => quick-start
- Do you want to create default migrations? => Yes
- Do you want to create a plugin? => Yes
- Do you want to use TypeScript? => No
- Do you want to install dependencies? => Yes (this can take a while)
- Do you want to apply the migrations? => Yes
- Do you want to generate types? => Yes
- Do you want to create the github action to deploy this application to Platformatic Cloud dynamic workspace? => No
- Do you want to create the github action to deploy this application to Platformatic Cloud static workspace? => No

Once the wizard is complete, you'll have a Platformatic app project in the +folder quick-start, with example migration files and a plugin script.

info

Make sure you run the npm/yarn/pnpm install command manually if you don't ask the wizard to do it for you.

+ + + + \ No newline at end of file diff --git a/docs/0.41.3/getting-started/quick-start-guide/index.html b/docs/0.41.3/getting-started/quick-start-guide/index.html new file mode 100644 index 00000000000..220adb52b68 --- /dev/null +++ b/docs/0.41.3/getting-started/quick-start-guide/index.html @@ -0,0 +1,38 @@ + + + + + +Quick Start Guide | Platformatic Open Source Software + + + + + +
+
Version: 0.41.3

Quick Start Guide

In this guide you'll learn how to create and run your first API with +Platformatic DB. Let's get started!

info

This guide uses SQLite for the database, but +Platformatic DB also supports PostgreSQL, +MySQL and MariaDB databases.

Prerequisites

Platformatic supports macOS, Linux and Windows (WSL recommended).

To follow along with this guide you'll need to have these things installed:

Create a new API project

Automatic CLI

Run this command in your terminal to start the Platformatic creator wizard:

npm create platformatic@latest

This interactive command-line tool will ask you some questions about how you'd +like to set up your new Platformatic project. For this guide, select these options:

- Which kind of project do you want to create?  => DB
- Where would you like to create your project? => quick-start
- Do you want to create default migrations? => Yes
- Do you want to create a plugin? => Yes
- Do you want to use TypeScript? => No
- Do you want to install dependencies? => Yes (this can take a while)
- Do you want to apply the migrations? => Yes
- Do you want to generate types? => Yes
- Do you want to create the github action to deploy this application to Platformatic Cloud dynamic workspace? => No
- Do you want to create the github action to deploy this application to Platformatic Cloud static workspace? => No

Once the wizard is complete, you'll have a Platformatic app project in the +folder quick-start, with example migration files and a plugin script.

info

Make sure you run the npm/yarn/pnpm install command manually if you don't ask the wizard to do it for you.

Start your API server

In your project directory, run this command to start your API server:

npm start

Your Platformatic API is now up and running! 🌟

This command will:

  • Automatically map your SQL database to REST and GraphQL API interfaces.
  • Start the Platformatic API server.

You can jump down to Next steps or read on to learn more about +the project files that the wizard has created for you.

Check the database schema

In your project directory (quick-start), open the migrations directory that can store your database migration files that will contain both the 001.do.sql and 001.undo.sql files. The 001.do.sql file contains the SQL statements to create the database objects, while the 001.undo.sql file contains the SQL statements to drop them.

migrations/001.do.sql
CREATE TABLE IF NOT EXISTS movies (
id INTEGER PRIMARY KEY,
title TEXT NOT NULL
);

Note that this migration has already been applied by the Platformatic creator.

Check your API configuration

In your project directory, check the Platformatic configuration file named +platformatic.db.json and the environment file named .env:

The created configuration tells Platformatic to:

  • Run an API server on http://127.0.0.1:3042/
  • Connect to an SQLite database stored in a file named db.sqlite
  • Look for database migration files in the migrations directory
  • Load the plugin file named plugin.js and automatically generate types
tip

The Configuration reference explains all of the +supported configuration options.

Manual setup

Create a directory for your new API project:

mkdir quick-start

cd quick-start

Then create a package.json file and install the platformatic +CLI as a project dependency:

npm init --yes

npm install platformatic

Add a database schema

In your project directory (quick-start), create a file for your sqlite3 database and also, a migrations directory to +store your database migration files:

touch db.sqlite

mkdir migrations

Then create a new migration file named 001.do.sql in the migrations +directory.

Copy and paste this SQL query into the migration file:

migrations/001.do.sql
CREATE TABLE movies (
id INTEGER PRIMARY KEY,
title VARCHAR(255) NOT NULL
);

When it's run by Platformatic, this query will create a new database table +named movies.

tip

You can check syntax for SQL queries on the Database.Guide SQL Reference.

Configure your API

In your project directory, create a new Platformatic configuration file named +platformatic.db.json.

Copy and paste in this configuration:

platformatic.db.json
{
"server": {
"hostname": "127.0.0.1",
"port": "3042"
},
"db": {
"connectionString": "sqlite://./db.sqlite"
},
"migrations": {
"dir": "./migrations",
"autoApply": "true"
}
}

This configuration tells Platformatic to:

  • Run an API server on http://127.0.0.1:3042/
  • Connect to an SQLite database stored in a file named db.sqlite
  • Look for, and apply the database migrations specified in the migrations directory
tip

The Configuration reference explains all of the +supported configuration options.

Start your API server

In your project directory, use the Platformatic CLI to start your API server:

npx platformatic db start

This will:

  • Automatically map your SQL database to REST and GraphQL API interfaces.
  • Start the Platformatic API server.

Your Platformatic API is now up and running! 🌟

Next steps

Use the REST API interface

You can use cURL to make requests to the REST interface of your API, for example:

Create a new movie

curl -X POST -H "Content-Type: application/json" \
-d "{ \"title\": \"Hello Platformatic DB\" }" \
http://localhost:3042/movies

You should receive a response from your API like this:

{"id":1,"title":"Hello Platformatic DB"}

Get all movies

curl http://localhost:3042/movies

You should receive a response from your API like this, with an array +containing all the movies in your database:

[{"id":1,"title":"Hello Platformatic DB"}]
tip

If you would like to know more about what routes are automatically available, +take a look at the REST API reference +for an overview of the REST interface that the generated API provides.

Swagger OpenAPI documentation

You can explore the OpenAPI documentation for your REST API in the Swagger UI at +http://localhost:3042/documentation

Use the GraphQL API interface

Open http://localhost:3042/graphiql in your +web browser to explore the GraphQL interface of your API.

Try out this GraphQL query to retrieve all movies from your API:

query {
movies {
id
title
}
}
tip

Learn more about your API's GraphQL interface in the +GraphQL API reference.

+ + + + \ No newline at end of file diff --git a/docs/0.41.3/guides/add-custom-functionality/extend-graphql/index.html b/docs/0.41.3/guides/add-custom-functionality/extend-graphql/index.html new file mode 100644 index 00000000000..44565fef7aa --- /dev/null +++ b/docs/0.41.3/guides/add-custom-functionality/extend-graphql/index.html @@ -0,0 +1,18 @@ + + + + + +Extend GraphQL Schema | Platformatic Open Source Software + + + + + +
+
Version: 0.41.3

Extend GraphQL Schema

Sum Function

Copy and paste this code into ./sample-plugin.js file

'use strict'
module.exports = async(app, opts) => {
app.graphql.extendSchema(`
extend type Query {
add(x: Int, y: Int): Int
}
`)
app.graphql.defineResolvers({
Query: {
add: async (_, { x, y }) => x + y
}
})
}

This will add a new GraphQL query called add which will simply add the two inputs x and y provided.

You don't need to reload the server, since it will watch this file and hot-reload itself. +Let's query the server with the following body


query{
add(x: 1, y: 2)
}

You can use curl command to run this query

$ curl --location --request POST 'http://localhost:3042/graphql' \
--header 'Content-Type: application/json' \
--data-raw '{"query":"query{\n add(x: 1, y: 2)\n}"}'

You will get this output, with the sum.

{
"data": {
"add": 3
}
}

Extend Entities API

Let's implement a getPageByTitle query

'use strict'
module.exports = async(app, opts) => {
app.graphql.extendSchema(`
extend type Query {
getPageByTitle(title: String): Page
}
`)
app.graphql.defineResolvers({
Query: {
getPageByTitle: async(_, { title }) => {
const res = await app.platformatic.entities.page.find({
where: {
title: {
eq: title
}
}
})
if (res) {
return res[0]
}
return null
}
}
})
}

Page GraphQL type is already defined by Platformatic DB on start.

We are going to run this code against this GraphQL query

query{
getPageByTitle(title: "First Page"){
id
title
}
}

You can use curl command to run this query

$ curl --location --request POST 'http://localhost:3042/graphql' \
--header 'Content-Type: application/json' \
--data-raw '{"query":"query{\n getPageByTitle(title: \"First Page\"){\n id\n title\n }\n}"}'

You will get an output similar to this

{
"data": {
"getPageByTitle": {
"id": "1",
"title": "First Page"
}
}
}
+ + + + \ No newline at end of file diff --git a/docs/0.41.3/guides/add-custom-functionality/extend-rest/index.html b/docs/0.41.3/guides/add-custom-functionality/extend-rest/index.html new file mode 100644 index 00000000000..bfff9716291 --- /dev/null +++ b/docs/0.41.3/guides/add-custom-functionality/extend-rest/index.html @@ -0,0 +1,17 @@ + + + + + +Extend REST API | Platformatic Open Source Software + + + + + +
+
Version: 0.41.3

Extend REST API

We will follow same examples implemented in GraphQL examples: a sum function and an API to get pages by title.

Sum Function

Copy and paste this code into ./sample-plugin.js file

'use strict'
module.exports = async(app, opts) => {
app.post('/sum', async(req, reply) => {
const { x, y } = req.body
return { sum: (x + y)}
})
}

You don't need to reload the server, since it will watch this file and hot-reload itself.

Let's make a POST /sum request to the server with the following body

{
"x": 1,
"y": 2
}

You can use curl command to run this query

$ curl --location --request POST 'http://localhost:3042/sum' \
--header 'Content-Type: application/json' \
--data-raw '{
"x": 1,
"y": 2
}'

You will get this output, with the sum.

{
"sum": 3
}

Extend Entities API

Let's implement a /page-by-title endpoint, using Entities API

'use strict'
module.exports = async(app, opts) => {
app.get('/page-by-title', async(req, reply) => {
const { title } = req.query
const res = await app.platformatic.entities.page.find({
where: {
title: {
eq: title
}
}
})
if (res) {
return res[0]
}
return null
})
}

We will make a GET /page-by-title?title=First%20Page request, and we expect a single page as output.

You can use curl command to run this query

$ curl --location --request GET 'http://localhost:3042/page-by-title?title=First Page'

You will get an output similar to this

{
"id": "1",
"title": "First Page",
"body": "This is the first sample page"
}
+ + + + \ No newline at end of file diff --git a/docs/0.41.3/guides/add-custom-functionality/introduction/index.html b/docs/0.41.3/guides/add-custom-functionality/introduction/index.html new file mode 100644 index 00000000000..84284cd6a59 --- /dev/null +++ b/docs/0.41.3/guides/add-custom-functionality/introduction/index.html @@ -0,0 +1,17 @@ + + + + + +Add Custom Functionality | Platformatic Open Source Software + + + + + +
+
Version: 0.41.3

Add Custom Functionality

If you want to extend Platformatic DB features, it is possible to register a plugin, which will be in the form of a standard Fastify plugin.

The config file will specify where the plugin file is located as the example below:

{
...
"plugins": {
"paths": ["./plugin/index.js"]
}
}

The path is relative to the config file path.

Since it uses fastify-isolate under the hood, all other options of that package may be specified under the plugin property.

Once the config file is set up, you can write your plugin

module.exports = async function (app) {
app.log.info('plugin loaded')
// Extend GraphQL Schema with resolvers
app.graphql.extendSchema(`
extend type Query {
add(x: Int, y: Int): Int
}
`)
app.graphql.defineResolvers({
Query: {
add: async (_, { x, y }) => x + y
}
})

// Create a new route, see https://www.fastify.io/docs/latest/Reference/Routes/ for more info
app.post('/sum', (req, reply) => {
const {x, y} = req.body
return { result: x + y }
})

// access platformatic entities data
app.get('/all-entities', (req, reply) => {
const entities = Object.keys(app.platformatic.entities)
return { entities }
})
}

+ + + + \ No newline at end of file diff --git a/docs/0.41.3/guides/add-custom-functionality/prerequisites/index.html b/docs/0.41.3/guides/add-custom-functionality/prerequisites/index.html new file mode 100644 index 00000000000..828036d5716 --- /dev/null +++ b/docs/0.41.3/guides/add-custom-functionality/prerequisites/index.html @@ -0,0 +1,17 @@ + + + + + +Prerequisites | Platformatic Open Source Software + + + + + +
+
Version: 0.41.3

Prerequisites

In the following examples we assume you already

  • cloned platformatic/platformatic repo from Github
  • ran pnpm install to install all dependencies
  • have Docker and docker-compose installed and running on your machine

Config File

Create a platformatic.db.json file in the root project, it will be loaded automatically by Platformatic (no need of -c, --config flag).

{
"server": {
"hostname": "127.0.0.1",
"port": 3042,
"logger": {
"level": "info"
}
},
"db": {
"connectionString": "postgres://postgres:postgres@127.0.0.1/postgres"
},
"migrations": {
"dir": "./migrations",
"table": "versions"
},
"plugins": {
"paths": ["plugin.js"]
}
}
  • Once Platformatic DB starts, its API will be available at http://127.0.0.1:3042
  • It will connect and read the schema from a PostgreSQL DB
  • Will read migrations from ./migrations directory
  • Will load custom functionality from the ./plugin.js file.

Database and Migrations

Start the database using the sample docker-compose.yml file.

$ docker-compose up -d postgresql

For migrations create a ./migrations directory and a 001.do.sql file with following contents

CREATE TABLE pages (
id SERIAL PRIMARY KEY,
title VARCHAR(255) NOT NULL,
body TEXT NOT NULL
);
INSERT INTO pages (title, body) VALUES ('First Page', 'This is the first sample page');
INSERT INTO pages (title, body) VALUES ('Second Page', 'This is the second sample page');
INSERT INTO pages (title, body) VALUES ('Third Page', 'This is the third sample page');

Plugin

Copy and paste this boilerplate code into ./plugin.js file. We will fill this in the examples.

'use strict'

module.exports = async (app, opts) => {
// we will fill this later
}

Start the server

Run

$ platformatic db start

You will get an output similar to this

                           /////////////
///// /////
/// ///
/// ///
/// ///
&& /// /// &&
&&&&&& /// /// &&&&&&
&&&& /// /// &&&&
&&& /// /// &&&&&&&&&&&&
&&& /// /////// //// && &&&&&
&& /// /////////////// &&&
&&& /// /// &&&
&&& /// // &&
&&& /// &&
&&& /// &&&
&&&& /// &&&
&&&&&% /// &&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&
///
///
///
///
///
///

[11:19:46.562] INFO (65122): running 001.do.sql
[11:19:46.929] INFO (65122): server listening
url: "http://127.0.0.1:3042"

Now it is possible to create some examples, like extend GraphQL Schema, extend REST API

+ + + + \ No newline at end of file diff --git a/docs/0.41.3/guides/add-custom-functionality/raw-sql/index.html b/docs/0.41.3/guides/add-custom-functionality/raw-sql/index.html new file mode 100644 index 00000000000..6169cfb3c97 --- /dev/null +++ b/docs/0.41.3/guides/add-custom-functionality/raw-sql/index.html @@ -0,0 +1,17 @@ + + + + + +Raw SQL queries | Platformatic Open Source Software + + + + + +
+
Version: 0.41.3

Raw SQL queries

To run raw SQL queries using plugins, use the app.platformatic.db.query method and pass to it a SQL query using the app.platformatic.sql method.

'use strict'
module.exports = async(app, opts) => {
app.graphql.extendSchema(`
type YearlySales {
year: Int
sales: Int
}

extend type Query {
yearlySales: [YearlySales]
}
`)
app.graphql.defineResolvers({
Query: {
yearlySales: async(_, { title }) => {
const { db, sql } = app.platformatic;
const res = await db.query(sql(`
SELECT
YEAR(created_at) AS year,
SUM(amount) AS sales
FROM
orders
GROUP BY
YEAR(created_at)
`))
return res
}
}
})
}
+ + + + \ No newline at end of file diff --git a/docs/0.41.3/guides/compiling-typescript-for-deployment/index.html b/docs/0.41.3/guides/compiling-typescript-for-deployment/index.html new file mode 100644 index 00000000000..37a52879e8a --- /dev/null +++ b/docs/0.41.3/guides/compiling-typescript-for-deployment/index.html @@ -0,0 +1,25 @@ + + + + + +Compiling Typescript for Deployment | Platformatic Open Source Software + + + + + +
+
Version: 0.41.3

Compiling Typescript for Deployment

Platformatic Service provides automatic TypeScript compilation during the startup +of your Node.js server. While this provides an amazing developer experience, in production it adds additional +start time and it requires more resources. In this guide, we show how to compile your TypeScript +source files before shipping to a server.

Setup

The following is supported by all Platformatic applications, as they are all based on the same plugin system. +If you have generated your application using npx create-platformatic@latest, you will have a similar section in your config file:

{
...
"plugins": {
"paths": [{
"path": "plugins",
"encapsulate": false
}, "routes"],
"typescript": "{PLT_TYPESCRIPT}"
}
}

Note that the {PLT_TYPESCRIPT} will be automatically replaced with the PLT_TYPESCRIPT environment variable, that is configured in your +.env (and .env.sample) file:

PLT_TYPESCRIPT=true

Older Platformatic applications might not have the same layout, if so you can update your settings to match (after updating your dependencies).

Compiling for deployment

Compiling for deployment is then as easy as running plt service compile in that same folder. Remember to set PLT_TYPESCRIPT=false in your environment variables in the deployed environments.

Usage with Runtime

If you are building a Runtime-based application, you will need +to compile every service independently or use the plt runtime compile command.

Avoid shipping TypeScript sources

If you want to avoid shipping the TypeScript sources you need to configure Platformatic with the location +where your files have been built by adding an outDir option:

{
...
"plugins": {
"paths": [{
"path": "plugins",
"encapsulate": false
}, "routes"],
"typescript": {
"enabled": "{PLT_TYPESCRIPT}",
"outDir": "dist"
}
}
}

This is not necessary if you include tsconfig.json together with the compiled code.

+ + + + \ No newline at end of file diff --git a/docs/0.41.3/guides/debug-platformatic-db/index.html b/docs/0.41.3/guides/debug-platformatic-db/index.html new file mode 100644 index 00000000000..24103521b99 --- /dev/null +++ b/docs/0.41.3/guides/debug-platformatic-db/index.html @@ -0,0 +1,17 @@ + + + + + +Debug Platformatic DB | Platformatic Open Source Software + + + + + +
+
Version: 0.41.3

Debug Platformatic DB

Error: No tables found in the database

  • Verify your database connection string is correct in your Platformatic DB configuration
    • Make sure the database name is correct
  • Ensure that you have run the migration command npx platformatic db migrations apply before starting the server. See the Platformatic DB Migrations documentation for more information on working with migrations.

Logging SQL queries

You can see all the queries that are being run against your database in your terminal by setting the logger level to trace in your platformatic.db.json config file:

platformatic.db.json
{
"server": {
"logger": {
"level": "trace"
}
}
}
+ + + + \ No newline at end of file diff --git a/docs/0.41.3/guides/deploying-on-lambda/index.html b/docs/0.41.3/guides/deploying-on-lambda/index.html new file mode 100644 index 00000000000..2a2f79260d2 --- /dev/null +++ b/docs/0.41.3/guides/deploying-on-lambda/index.html @@ -0,0 +1,26 @@ + + + + + +Deploying on AWS Lambda | Platformatic Open Source Software + + + + + +
+
Version: 0.41.3

Deploying on AWS Lambda

It is possible to deploy Platformatic applications to AWS Lambda +by leveraging @fastify/aws-lambda.

Once you set up your Platformatic DB application, such as following +our tutorial, you can create a +server.mjs file as follows:

import awsLambdaFastify from '@fastify/aws-lambda'
import { buildServer } from '@platformatic/db'

const app = await buildServer('./platformatic.db.json')
// You can use the same approach with both Platformatic DB and
// and service
// const app = await buildServer('./platformatic.service.json')

// The following also work for Platformatic Service applications
// import { buildServer } from '@platformatic/service'
export const handler = awsLambdaFastify(app)

// Loads the Application, must be after the call to `awsLambdaFastify`
await app.ready()

This would be the entry point for your AWS Lambda function.

Avoiding cold start

Caching the DB schema

If you use Platformatic DB, you want to turn on the schemalock +configuration to cache the schema +information on disk.

Set the db.schemalock configuration to true, start the application, +and a schema.lock file should appear. Make sure to commit that file and +deploy your lambda.

Provisioned concurrency

Since AWS Lambda now enables the use of ECMAScript (ES) modules in Node.js 14 runtimes, +you could lower the cold start latency when used with Provisioned Concurrency +thanks to the top-level await functionality. (Excerpt taken from @fastify/aws-lambda)

+ + + + \ No newline at end of file diff --git a/docs/0.41.3/guides/deployment/advanced-fly-io-deployment/index.html b/docs/0.41.3/guides/deployment/advanced-fly-io-deployment/index.html new file mode 100644 index 00000000000..d5fd03cf7b3 --- /dev/null +++ b/docs/0.41.3/guides/deployment/advanced-fly-io-deployment/index.html @@ -0,0 +1,22 @@ + + + + + +Advanced Fly.io Deployment | Platformatic Open Source Software + + + + + +
+
Version: 0.41.3

Advanced Fly.io Deployment

Techniques used in this guide are based on the Deploy to Fly.io with SQLite +deployment guide.

Adding sqlite for debugging

With a combination of Docker and Fly.io, you can create an easy way to debug your sqlite application without stopping your application or exporting the data. At the end of this guide, you will be able to run fly ssh console -C db-cli to be dropped into your remote database.

Start by creating a script for launching the database, calling it db-cli.sh:

#!/bin/sh
set -x
# DSN will be defined in the Dockerfile
sqlite3 $DSN

Create a new Dockerfile which will act as the build and deployment image:

FROM node:18-alpine

# Setup sqlite viewer
RUN apk add sqlite
ENV DSN "/app/.platformatic/data/app.db"
COPY db-cli.sh /usr/local/bin/db-cli
RUN chmod +x /usr/local/bin/db-cli

WORKDIR /app
COPY package.json package.json
COPY package-lock.json package-lock.json

RUN npm ci --omit=dev

COPY platformatic.db.json platformatic.db.json

COPY migrations migrations
# Uncomment if your application is running a plugin
# COPY plugin.js plugin.js

EXPOSE 8080

CMD ["npm", "start"]

Add a start script to your package.json:

{
"scripts": {
"start": "platformatic db"
}
}

With Fly, it becomes straightforward to connect directly to the database by +running the following command from your local machine:

fly ssh console -C db-cli
+ + + + \ No newline at end of file diff --git a/docs/0.41.3/guides/deployment/deploy-to-fly-io-with-sqlite/index.html b/docs/0.41.3/guides/deployment/deploy-to-fly-io-with-sqlite/index.html new file mode 100644 index 00000000000..24a1db5a079 --- /dev/null +++ b/docs/0.41.3/guides/deployment/deploy-to-fly-io-with-sqlite/index.html @@ -0,0 +1,33 @@ + + + + + +Deploy to Fly.io with SQLite | Platformatic Open Source Software + + + + + +
+
Version: 0.41.3

Deploy to Fly.io with SQLite

note

To follow this how-to guide, you'll first need to install the Fly CLI and create +an account by following this official guide. +You will also need an existing Platformatic DB project, please check out our +getting started guide if needed.

Navigate to your Platformatic DB project in the terminal on your local machine. +Run fly launch and follow the prompts. When it asks if you want to deploy +now, say "no" as there are a few things that you'll need to configure first.

You can also create the fly application with one line. This will create your +application in London (lhr):

fly launch --no-deploy --generate-name --region lhr --org personal --path .

The fly CLI should have created a fly.toml file in your project +directory.

Explicit builder

The fly.toml file may be missing an explicit builder setting. To have +consistent builds, it is best to add a build section:

[build]
builder = "heroku/buildpacks:20"

Database storage

Create a volume for database storage, naming it data:

fly volumes create data

This will create storage in the same region as the application. The volume +defaults to 3GB size, use -s to change the size. For example, -s 10 is 10GB.

Add a mounts section in fly.toml:

[mounts]
source = "data"
destination = "/app/.platformatic/data"

Create a directory in your project where your SQLite database will be created:

mkdir -p .platformatic/data

touch .platformatic/data/.gitkeep

The .gitkeep file ensures that this directory will always be created when +your application is deployed.

You should also ensure that your SQLite database is ignored by Git. This helps +avoid inconsistencies when your application is deployed:

echo "*.db" >> .gitignore

The command above assumes that your SQLite database file ends with the extension +.db — if the extension is different then you must change the command to match.

Change the connection string to an environment variable and make sure that +migrations are autoApplying (for platformatic@^0.4.0) in platformatic.db.json:

{
"db": {
"connectionString": "{DATABASE_URL}"
},
"migrations": {
"dir": "./migrations",
"autoApply": true
}
}

Configure server

Make sure that your platformatic.db.json uses environment variables +for the server section:

{
"server": {
"logger": {
"level": "{PLT_SERVER_LOGGER_LEVEL}"
},
"hostname": "{PLT_SERVER_HOSTNAME}",
"port": "{PORT}"
}
}

Configure environment

Start with your local environment, create a .env file and put the following:

PORT=3042
PLT_SERVER_HOSTNAME=127.0.0.1
PLT_SERVER_LOGGER_LEVEL=debug
DATABASE_URL=sqlite://.platformatic/data/movie-quotes.db

Avoid accidental leaks by ignoring your .env file:

echo ".env" >> .gitignore

This same configuration needs to be added to fly.toml:

[env]
PORT = 8080
PLT_SERVER_HOSTNAME = "0.0.0.0"
PLT_SERVER_LOGGER_LEVEL = "info"
DATABASE_URL = "sqlite:///app/.platformatic/data/movie-quotes.db"

Deploy application

A valid package.json will be needed so if you do not have one, generate one +by running npm init.

In your package.json, make sure there is a start script to run your +application:

{
"scripts": {
"start": "platformatic db"
}
}

Before deploying, make sure a .dockerignore file is created:

cp .gitignore .dockerignore

Finally, deploy the application to Fly by running:

fly deploy
+ + + + \ No newline at end of file diff --git a/docs/0.41.3/guides/deployment/index.html b/docs/0.41.3/guides/deployment/index.html new file mode 100644 index 00000000000..af68b986658 --- /dev/null +++ b/docs/0.41.3/guides/deployment/index.html @@ -0,0 +1,46 @@ + + + + + +Deployment | Platformatic Open Source Software + + + + + +
+
Version: 0.41.3

Deployment

Applications built with Platformatic DB can be deployed to a hosting service +in the same way as any other Node.js application. This guide covers a few +things that will help smooth the path from development to production.

Running a Platformatic DB application

Make the Platformatic CLI available

To run a Platformatic DB application, the Platformatic CLI must be available +in the production environment. The most straightforward way of achieving this +is to install it as a project dependency. +This means that when npm install (or npm ci) is run as part of your +build/deployment process, the Platformatic CLI will be installed.

Define an npm run script

A number of hosting services will automatically detect if your project's +package.json has a start npm run script. They will then execute the command +npm start to run your application in production.

You can add platformatic db start as the command for your project's start +npm run script, for example:

{
...
"scripts": {
"start": "platformatic db start",
},
}

Server configuration

info

See the Configuration reference for all +configuration settings.

Configuration with environment variables

We recommend that you use environment variable placeholders +in your Platformatic DB configuration. This will allow you to configure +different settings in your development and production environments.

In development you can set the environment variables via a .env file +that will be automatically loaded by Platformatic DB. For example:

PORT=3042
PLT_SERVER_HOSTNAME=127.0.0.1

In production your hosting provider will typically provide their own mechanism +for setting environment variables.

Configure the server port

Configure the port that the server will listen on by setting an environment +variable placeholder in your Platformatic DB configuration file:

platformatic.db.json
{
"server": {
...
"port": "{PORT}"
},
...
}

Listen on all network interfaces

Most hosting providers require that you configure your server to bind to all +available network interfaces. To do this you must set the server hostname to +0.0.0.0.

This can be handled with an environment variable placeholder in your Platformatic +DB configuration file:

platformatic.db.json
{
"server": {
...
"hostname": "{PLT_SERVER_HOSTNAME}",
},
...
}

The environment variable PLT_SERVER_HOSTNAME should then be set to 0.0.0.0 +in your hosting environment.

Security considerations

We recommend disabling the GraphiQL web UI in production. It can be disabled +with the following configuration:

platformatic.db.json
{
"db": {
...
"graphql": {
"graphiql": false
}
},
...
}

If you want to use this feature in development, replace the configuration +values with environment variable placeholders +so you can set it to true in development and false in production.

Removing the welcome page

If you want to remove the welcome page, you should register an index route.

module.exports = async function (app) {
// removing the welcome page
app.get('/', (req, reply) => {
return { hello: 'world' }
})
}

Databases

Applying migrations

If you're running a single instance of your application in production, it's best to allow Platformatic DB to automatically run migrations when the server starts. This reduces the chance of a currently running instance using a database structure it doesn't understand while the new version is still being deployed.

SQLite

When using an SQLite database, you can ensure you don’t commit it to your Git +repository by adding the SQLite database filename to your .gitignore file. +The SQLite database file will be automatically generated by Platformatic DB +when your application migrations are run in production.

+ + + + \ No newline at end of file diff --git a/docs/0.41.3/guides/dockerize-platformatic-app/index.html b/docs/0.41.3/guides/dockerize-platformatic-app/index.html new file mode 100644 index 00000000000..1fe4053c7ec --- /dev/null +++ b/docs/0.41.3/guides/dockerize-platformatic-app/index.html @@ -0,0 +1,20 @@ + + + + + +Dockerize a Platformatic App | Platformatic Open Source Software + + + + + +
+
Version: 0.41.3

Dockerize a Platformatic App

This guide explains how to create a new Platformatic DB app, which connects to a PostgreSQL database.

We will then create a docker-compose.yml file that will run both services in separate containers

Generate a Platformatic DB App

Run this command in your terminal to start the Platformatic creator wizard:

npm create platformatic@latest

This interactive command-line tool will ask you some questions about how you'd +like to set up your new Platformatic project. For this guide, select these options:

- Which kind of project do you want to create?  => DB
- Where would you like to create your project? => quick-start
- Do you want to create default migrations? => Yes
- Do you want to create a plugin? => Yes
- Do you want to use TypeScript? => No
- Do you want to install dependencies? => Yes (this can take a while)
- Do you want to apply the migrations? => Yes
- Do you want to generate types? => Yes
- Do you want to create the github action to deploy this application to Platformatic Cloud dynamic workspace? => No
- Do you want to create the github action to deploy this application to Platformatic Cloud static workspace? => No

Once the wizard is complete, you'll have a Platformatic app project in the +folder quick-start, with example migration files and a plugin script.

info

Make sure you run the npm/yarn/pnpm install command manually if you don't ask the wizard to do it for you.

Create Docker image for the Platformatic DB App

In this step you are going to create some files into the root project directory

  • .dockerignore - This file tells Docker to ignore some files when copying the directory into the image filesystem
node_modules
.env*
  • start.sh - This is our entrypoint. We will run migrations then start platformatic
#!/bin/sh

echo "Running migrations..." && \
npx platformatic db migrations apply && \
echo "Starting Platformatic App..." && \
npm start
info

Make sure you make this file executable with the command chmod +x start.sh

  • Dockerfile - This is the file Docker uses to create the image
FROM node:18-alpine
WORKDIR /usr/src/app
COPY . .
RUN npm install
EXPOSE 3042
CMD [ "./start.sh" ]

At this point you can build your Docker image with the command

$ docker build -t platformatic-app .

Create Docker Compose config file

docker-compose.yml is the configuration file for docker-compose which will spin up containers for both PostgreSQL and our Platformatic App

version: "3.3"
services:
postgresql:
ports:
- "5433:5432"
image: "postgres:15-alpine"
environment:
- POSTGRES_PASSWORD=postgres
platformatic:
ports:
- "3042:3042"
image: 'platformatic-app:latest'
depends_on:
- postgresql
links:
- postgresql
environment:
PLT_SERVER_HOSTNAME: ${PLT_SERVER_HOSTNAME}
PORT: ${PORT}
PLT_SERVER_LOGGER_LEVEL: ${PLT_SERVER_LOGGER_LEVEL}
DATABASE_URL: postgres://postgres:postgres@postgresql:5432/postgres

A couple of things to notice:

  • The Platformatic app is started only once the database container is up and running (depends_on).
  • The Platformatic app is linked with postgresql service. Meaning that inside its container ping postgresql will be resolved with the internal ip of the database container.
  • The environment is taken directly from the .env file created by the wizard

You can now run your containers with

$ docker-compose up # (-d if you want to send them in the background)

Everything should start smoothly, and you can access your app pointing your browser to http://0.0.0.0:3042

To stop the app you can either press CTRL-C if you are running them in the foreground, or, if you used the -d flag, run

$ docker-compose down
+ + + + \ No newline at end of file diff --git a/docs/0.41.3/guides/generate-frontend-code-to-consume-platformatic-rest-api/index.html b/docs/0.41.3/guides/generate-frontend-code-to-consume-platformatic-rest-api/index.html new file mode 100644 index 00000000000..96dc5763fa7 --- /dev/null +++ b/docs/0.41.3/guides/generate-frontend-code-to-consume-platformatic-rest-api/index.html @@ -0,0 +1,32 @@ + + + + + +Generate Front-end Code to Consume Platformatic REST API | Platformatic Open Source Software + + + + + +
+
Version: 0.41.3

Generate Front-end Code to Consume Platformatic REST API

By default, a Platformatic app exposes REST API that provide CRUD (Create, Read, +Update, Delete) functionality for each entity (see the +Introduction to the REST API +documentation for more information on the REST API).

The Platformatic CLI allows you to auto-generate the front-end code to import into your front-end application to consume the Platformatic REST API.

This guide

  • Explains how to create a new Platformatic app.
  • Explains how to configure the new Platformatic app.
  • Explains how to create a new React or Vue.js front-end application.
  • Explains how to generate the front-end TypeScript code to consume the Platformatic app REST API.
  • Provides some React and Vue.js components (both written in TypeScript) that read, create, and update an entity.
  • Explains how to import the new component in your front-end application.

Create a new Platformatic app

Run this command in your terminal to start the Platformatic creator wizard:

npm create platformatic@latest

This interactive command-line tool will ask you some questions about how you'd +like to set up your new Platformatic project. For this guide, select these options:

- Which kind of project do you want to create?  => DB
- Where would you like to create your project? => quick-start
- Do you want to create default migrations? => Yes
- Do you want to create a plugin? => Yes
- Do you want to use TypeScript? => No
- Do you want to install dependencies? => Yes (this can take a while)
- Do you want to apply the migrations? => Yes
- Do you want to generate types? => Yes
- Do you want to create the github action to deploy this application to Platformatic Cloud dynamic workspace? => No
- Do you want to create the github action to deploy this application to Platformatic Cloud static workspace? => No

Once the wizard is complete, you'll have a Platformatic app project in the +folder quick-start, with example migration files and a plugin script.

info

Make sure you run the npm/yarn/pnpm install command manually if you don't ask the wizard to do it for you.

Configure the new Platformatic app

documentation to create a new Platformatic app. Every Platformatic app uses the "Movie" demo entity and includes +the corresponding table, migrations, and REST API to create, read, update, and delete movies.

Once the new Platformatic app is ready:

  • Set up CORS in platformatic.db.json
{
"$schema": "https://platformatic.dev/schemas/v0.24.0/db",
"server": {
"hostname": "{PLT_SERVER_HOSTNAME}",
"port": "{PORT}",
"logger": {
"level": "{PLT_SERVER_LOGGER_LEVEL}"
},
+ "cors": {
+ "origin": {
+ "regexp": "/*/"
+ }
+ }
},
...
}

You can find more details about the cors configuration here.

  • launch Platformatic through npm start. +Then, the Platformatic app should be available at the http://127.0.0.1:3042/ URL.

Create a new Front-end Application

Refer to the Scaffolding Your First Vite Project +documentation to create a new front-end application, and call it "rest-api-frontend".

info

Please note Vite is suggested only for practical reasons, but the bundler of choice does not make any difference.

If you are using npm 7+ you should run

npm create vite@latest rest-api-frontend -- --template react-ts

and then follow the Vite's instructions

Scaffolding project in /Users/noriste/Sites/temp/platformatic/rest-api-frontend...

Done. Now run:

cd rest-api-frontend
npm install
npm run dev

Once done, the front-end application is available at http://localhost:5174/.

Generate the front-end code to consume the Platformatic app REST API

Now that both the Platformatic app and the front-end app are running, go to the front-end codebase and run the Platformatic CLI

cd rest-api-frontend/src
npx platformatic frontend http://127.0.0.1:3042 ts

Refer to the Platformatic CLI frontend command +documentation to know about the available options.

The Platformatic CLI generates

  • api.d.ts: A TypeScript module that includes all the OpenAPI-related types. +Here is part of the generated code
interface GetMoviesRequest {
'limit'?: number;
'offset'?: number;
// ... etc.
}

interface GetMoviesResponseOK {
'id'?: number;
'title': string;
}


// ... etc.

export interface Api {
setBaseUrl(baseUrl: string): void;
getMovies(req: GetMoviesRequest): Promise<Array<GetMoviesResponseOK>>;
createMovie(req: CreateMovieRequest): Promise<CreateMovieResponseOK>;
// ... etc.
}
  • api.ts: A TypeScript module that includes a typed function for every single OpenAPI endpoint. +Here is part of the generated code
import type { Api } from './api-types'

let baseUrl = ''
export function setBaseUrl(newUrl: string) { baseUrl = newUrl };

export const createMovie: Api['createMovie'] = async (request) => {
const response = await fetch(`${baseUrl}/movies/`, {
method:'post',
body: JSON.stringify(request),
headers: {
'Content-Type': 'application/json'
}
})

if (!response.ok) {
throw new Error(await response.text())
}

return await response.json()
}

// etc.

You can add a --name option to the command line to provide a custom name for the generated files.

cd rest-api-frontend/src
npx platformatic frontend --name foobar http://127.0.0.1:3042 ts

will generate foobar.ts and foobar-types.d.ts

React and Vue.js components that read, create, and update an entity

You can copy/paste the following React or Vue.js components that import the code +the Platformatic CLI generated.

Create a new file src/PlatformaticPlayground.tsx and copy/paste the following code.

import { useEffect, useState } from 'react'

// getMovies, createMovie, and updateMovie are all functions automatically generated by Platformatic
// in the `api.ts` module.
import { getMovies, createMovie, updateMovie, setBaseUrl } from './api'

setBaseUrl('http://127.0.0.1:3042') // configure this according to your needs

export function PlatformaticPlayground() {
const [movies, setMovies] = useState<Awaited<ReturnType<typeof getMovies>>>([])
const [newMovie, setNewMovie] = useState<Awaited<ReturnType<typeof createMovie>>>()

async function onCreateMovie() {
const newMovie = await createMovie({ title: 'Harry Potter' })
setNewMovie(newMovie)
}

async function onUpdateMovie() {
if (!newMovie || !newMovie.id) return

const updatedMovie = await updateMovie({ id: newMovie.id, title: 'The Lord of the Rings' })
setNewMovie(updatedMovie)
}

useEffect(() => {
async function fetchMovies() {
const movies = await getMovies({})
setMovies(movies)
}

fetchMovies()
}, [])

return (
<>
<h2>Movies</h2>

{movies.length === 0 ? (
<div>No movies yet</div>
) : (
<ul>
{movies.map((movie) => (
<li key={movie.id}>{movie.title}</li>
))}
</ul>
)}

<button onClick={onCreateMovie}>Create movie</button>
<button onClick={onUpdateMovie}>Update movie</button>

{newMovie && <div>Title: {newMovie.title}</div>}
</>
)
}

Import the new component in your front-end application

You need to import and render the new component in the front-end application.

Change the App.tsx as follows

import { useState } from 'react'
import reactLogo from './assets/react.svg'
import viteLogo from '/vite.svg'
import './App.css'

+import { PlatformaticPlayground } from './PlatformaticPlayground'

function App() {
const [count, setCount] = useState(0)

return (
<>
+ <PlatformaticPlayground />
<div>
<a href="https://vitejs.dev" target="_blank">
<img src={viteLogo} className="logo" alt="Vite logo" />
</a>
<a href="https://react.dev" target="_blank">
<img src={reactLogo} className="logo react" alt="React logo" />
</a>
</div>
<h1>Vite + React</h1>
<div className="card">
<button onClick={() => setCount((count) => count + 1)}>count is {count}</button>
<p>
Edit <code>src/App.tsx</code> and save to test HMR
</p>
</div>
<p className="read-the-docs">Click on the Vite and React logos to learn more</p>
</>
)
}

export default App

Have fun

At the top of the front-end application, the new component requests the movies from the Platformatic app and lists them.

Platformatic frontend guide: listing the movies

Click on "Create movie" to create a new movie called "Harry Potter".

Platformatic frontend guide: creating a movie

Click on "Update movie" to rename "Harry Potter" into "Lord of the Rings".

Platformatic frontend guide: editing a movie

Reload the front-end application to see the new "Lord of the Rings" movie listed.

Platformatic frontend guide: listing the movies +.

+ + + + \ No newline at end of file diff --git a/docs/0.41.3/guides/jwt-auth0/index.html b/docs/0.41.3/guides/jwt-auth0/index.html new file mode 100644 index 00000000000..4ef04247a99 --- /dev/null +++ b/docs/0.41.3/guides/jwt-auth0/index.html @@ -0,0 +1,21 @@ + + + + + +Configure JWT with Auth0 | Platformatic Open Source Software + + + + + +
+
Version: 0.41.3

Configure JWT with Auth0

Auth0 is a powerful authentication and authorization service provider that can be integrated with Platformatic DB through JSON Web Tokens (JWT). When a user is authenticated, Auth0 creates a JWT token with all necessary security information and custom claims (like X-PLATFORMATIC-ROLE, see User Metadata) and signs the token.

Platformatic DB needs the correct public key to verify the JWT signature. +The fastest way is to leverage JWKS, since Auth0 exposes a JWKS endpoint for each tenant. +Given a Auth0 tenant's issuer URL, the (public) keys are accessible at ${issuer}/.well-known/jwks.json. +For instance, if issuer is: https://dev-xxx.us.auth0.com/, the public keys are accessible at https://dev-xxx.us.auth0.com/.well-known/jwks.json

To configure Platformatic DB authorization to use JWKS with Auth0, set:


...
"authorization": {
"jwt": {
"jwks": {
"allowedDomains": [
"https://dev-xxx.us.auth0.com/"
]
}
},
}
...

danger

Note that specifying allowedDomains is critical to correctly restrict the JWTs, which MUST be issued from one of the allowed domains.

Custom Claim Namespace

In Auth0 there are restrictions about the custom claim that can be set on access tokens. One of these is that the custom claims MUST be namespaced, i.e. we cannot have X-PLATFORMATIC-ROLE but we must specify a namespace, e.g.: https://platformatic.dev/X-PLATFORMATIC-ROLE

To map these claims to user metadata removing the namespace, we can specify the namespace in the JWT options:

...
"authorization": {
"jwt": {
"namespace": "https://platformatic.dev/",
"jwks": {
"allowedDomains": [
"https://dev-xxx.us.auth0.com/"
]
}
},
}
...

With this configuration, the https://platformatic.dev/X-PLATFORMATIC-ROLE claim is mapped to X-PLATFORMATIC-ROLE user metadata.

+ + + + \ No newline at end of file diff --git a/docs/0.41.3/guides/migrating-express-app-to-platformatic-service/index.html b/docs/0.41.3/guides/migrating-express-app-to-platformatic-service/index.html new file mode 100644 index 00000000000..b4cd24647cd --- /dev/null +++ b/docs/0.41.3/guides/migrating-express-app-to-platformatic-service/index.html @@ -0,0 +1,17 @@ + + + + + +Migrating an Express app to Platformatic Service | Platformatic Open Source Software + + + + + +
+
Version: 0.41.3

Migrating an Express app to Platformatic Service

Introduction

Our open-source tools are built on top of the modern and flexible Fastify web framework. It provides logging, request validation and a powerful plugin system out-of-the-box, as well as incredible performance.

If you have an existing Express application, migrating it to Fastify could potentially be time consuming, and might not be something that you're able to prioritise right now. You can however still take advantage of Fastify and our open-source tools. In this guide you'll learn how to use the @fastify/express plugin to help you rapidly migrate your existing Express application to use Platformatic Service.

This guide assumes that you have some experience building applications with the Express framework.

Example Express application

For the purpose of this guide, we have a basic example Express application. Although this app has a specific structure, the migration steps covered in this guide can generally be applied to any Express application.

The code for the example Express and migrated Platformatic Service applications is available on GitHub.

Here's the structure of the example Express application:

├── app.js
├── package.json
├── routes
│ └── users.js
└── server.js

It has the following dependencies:

// package.json

"dependencies": {
"express": "^4.18.2"
}

The application has routes in routes/users.js:

// routes/users.js

import express from 'express'

const router = express.Router()

router.use(express.json())

router.post('/', function createUser(request, response, next) {
const newUser = request.body

if (!newUser) {
return next(new Error('Error creating user'))
}

response.status(201).json(newUser)
})

router.get('/:user_id', function getUser(request, response, next) {
const user = {
id: Number(request.params.user_id),
first_name: 'Bobo',
last_name: 'Oso'
}

response.json(user)
})

export const usersRoutes = router

In app.js, we have a factory function that creates a new Express server instance and mounts the routes:

// app.js

import express from 'express'

import { usersRoutes } from './routes/users.js'

export default function buildApp() {
const app = express()

app.use('/users', usersRoutes)

return app
}

And in server.js we're calling the factory function and starting the server listening for HTTP requests:

// server.js

import buildApp from './app.js'

const express = buildApp()

express.listen(3042, () => {
console.log('Example app listening at http://localhost:3042')
})

The routes in your Express application should be mounted on an Express router (or multiple routers if needed). This will allow them to be mounted using @fastify/express when you migrate your app to Platformatic Service.

Creating a new Platformatic Service app

To migrate your Express app to Platformatic Service, create a new Platformatic Service app with:

npm create platformatic@latest

Be sure to select Service as the project type. You should also say yes when you're asked if you want to create the GitHub Actions workflows for deploying your application to Platformatic Cloud.

Once the project has been created, you can delete the example plugins and routes directories.

Using ES modules

If you're using ES modules in the Express application code that you'll be migrating, ensure that there's a type field in package.json set to module:

npm pkg set type=module

Migrate the Express routes

Copy over the routes directory from your Express app.

Install @fastify/express

Install the @fastify/express Fastify plugin to add full Express compatibility to your Platformatic Service app:

npm install @fastify/express

Mounting the Express routes

Create a root Fastify plugin that registers the @fastify/express plugin and loads your Express routes:

// root-plugin.js

import { usersRoutes } from './routes/users.js'

/** @param {import('fastify').FastifyInstance} app */
export default async function (app) {
await app.register(import('@fastify/express'))

app.use('/users', usersRoutes)
}

Configuring the Platformatic Service app

Edit your app's platformatic.service.json to load your root plugin:

// platformatic.service.json

{
...,
"plugins": {
"paths": [{
"path": "./root-plugin.js",
"encapsulate": false
}],
"hotReload": false
},
"watch": false
}

These settings are important when using @fastify/express in a Platformatic Service app:

  • encapsulate — You'll need to disable encapsulation for any Fastify plugin which mounts Express routes. This is due to the way that @fastify/express works.
  • hotReload and watch — You'll need to disable hot reloading and watching for your app, as they don't currently work when using @fastify/express. This is a known issue that we're working to fix.

Wrapping up

You can learn more about building Node.js apps with Platformatic service in the Platformatic Service documentation.

Once you've migrated your Express app to use Platformatic Service with @fastify/express, you might then want to consider fully migrating your Express routes and application code to Fastify. This tutorial shows how you can approach that migration process: How to migrate your app from Express to Fastify (video).

+ + + + \ No newline at end of file diff --git a/docs/0.41.3/guides/migrating-fastify-app-to-platformatic-service/index.html b/docs/0.41.3/guides/migrating-fastify-app-to-platformatic-service/index.html new file mode 100644 index 00000000000..2522e4c0fdc --- /dev/null +++ b/docs/0.41.3/guides/migrating-fastify-app-to-platformatic-service/index.html @@ -0,0 +1,17 @@ + + + + + +Migrating a Fastify app to Platformatic Service | Platformatic Open Source Software + + + + + +
+
Version: 0.41.3

Migrating a Fastify app to Platformatic Service

Introduction

Building production ready Node.js application with Fastify can require a certain amount of boilerplate code. This is a side effect of some of Fastify's technical principles:

  • If it can be a plugin, it should be a plugin — Plugins help with the separation of concerns, they improve testability, and also provide a way to logically organise and structure your applications.
  • Developer choice = developer freedom — Fastify only applies a few strong opinions, in key areas such as logging and validation. The framework features have been designed to give you the freedom to build your applications however you want.
  • You know your needs best — Fastify doesn't make assumptions about what plugins you'll need in your application. As the Fastify plugin ecosystem and the community has grown, a clear group of popular plugin choices has emerged.

Platformatic Service is the natural evolution of the build-it-from-scratch Fastify development experience. It provides a solid foundation for building Node.js applications on top of Fastify, with best practices baked in.

See the Building apps with Platformatic Service section of this guide to learn more about the built-in features.

The good news is that the path to migrate a Fastify application to use Platformatic Service is fairly straightforward. This guide covers some of the things you'll need to know when migrating an application, as well as tips on different migration approaches.

This guide assumes that you have some experience building applications with the Fastify framework. If you'd like to learn more about building web applications with Fastify, we recommend taking a look at:

Example Fastify application

For the purpose of this guide, we have a basic example Fastify application. Although this app has a specific structure, the migration steps covered in this guide can generally be applied to any Fastify application.

The code for the example Fastify and migrated Platformatic Service applications is available on GitHub.

Here's the structure of the example Fastify application:

├── app.js
├── package.json
├── plugins
│   └── data-source.js
├── routes
│   ├── movies.js
│   └── quotes.js
├── server.js
└── test
└── routes.test.js

It has the following dependencies:

// package.json

"dependencies": {
"fastify": "^4.17.0",
"fastify-plugin": "^4.5.0"
}

The application has a plugin that decorates the Fastify server instance, as well as two Fastify plugins which define API routes. Here's the code for them:

// plugins/data-source.js

import fastifyPlugin from 'fastify-plugin'

/** @param {import('fastify').FastifyInstance} app */
async function dataSource (app) {
app.decorate('movies', [
'Jaws',
'Star Wars',
'The Wizard of Oz'
])

app.decorate('quotes', [
'You\'re gonna need a bigger boat.',
'May the Force be with you.',
'Toto, I\'ve got a feeling we\'re not in Kansas anymore.'
])
}

export default fastifyPlugin(dataSource)

fastify-plugin is used to prevent Fastify from creating a new encapsulation context for the plugin. This makes the decorators that are registered in the dataSource plugin available in the route plugins. You can learn about this fundamental Fastify concept in the Fastify Encapsulation documentation.

// routes/movies.js

/** @param {import('fastify').FastifyInstance} app */
export default async function movieRoutes (app) {
app.get('/', async (request, reply) => {
return app.movies
})
}
// routes/quotes.js

/** @param {import('fastify').FastifyInstance} app */
export default async function quotesRoutes (app) {
app.get('/', async (request, reply) => {
return app.quotes
})
}

The route plugins aren't registering anything that needs to be available in other plugins. They have their own encapsulation context and don't need to be wrapped with fastify-plugin.

There's also a buildApp() factory function in app.js, which takes care of creating a new Fastify server instance and registering the plugins and routes:

// app.js

import fastify from 'fastify'

export async function buildApp (options = {}) {
const app = fastify(options)

app.register(import('./plugins/data-source.js'))

app.register(import('./routes/movies.js'), { prefix: '/movies' })
app.register(import('./routes/quotes.js'), { prefix: '/quotes' })

return app
}

And server.js, which calls the buildApp function to create a new Fastify server, and then starts it listening:

// server.js

import { buildApp } from './app.js'

const port = process.env.PORT || 3042
const host = process.env.HOST || '127.0.0.1'

const options = {
logger: {
level: 'info'
}
}

const app = await buildApp(options)

await app.listen({ port, host })

As well as a couple of tests for the API routes:

// tests/routes.test.js

import { test } from 'node:test'
import assert from 'node:assert/strict'

import { buildApp } from '../app.js'

test('Basic API', async (t) => {
const app = await buildApp()

t.after(async () => {
await app.close()
})

await t.test('GET request to /movies route', async () => {
const response = await app.inject({
method: 'GET',
url: '/movies'
})

assert.equal(response.statusCode, 200)
assert.deepEqual(response.json(), [
'Jaws',
'Star Wars',
'The Wizard of Oz'
])
})

await t.test('GET request to /quotes route', async () => {
const response = await app.inject({
method: 'GET',
url: '/quotes'
})

assert.equal(response.statusCode, 200)
assert.deepEqual(response.json(), [
'You\'re gonna need a bigger boat.',
'May the Force be with you.',
'Toto, I\'ve got a feeling we\'re not in Kansas anymore.'
])
})
})

These tests are using the built in Node.js test runner, node:test. They can be run with the command: node --test --test-reporter=spec test/*.test.js.

The @param lines in this application code are JSDoc blocks that import the FastifyInstance type. This allows many code editors to provide auto-suggest, type hinting and type checking for your code.

Creating a new Platformatic Service app

To migrate your Fastify app to Platformatic Service, create a new Platformatic Service app with:

npm create platformatic@latest

Be sure to select Service as the project type. Once the project has been created, you can delete the example plugins and routes directories.

App configuration

The configuration for the Platformatic Service app is stored in platformatic.service.json.

The generated configuration is set up to load plugins from the plugins and routes directories:

// platformatic.service.json

"plugins": {
"paths": [
"./plugins",
"./routes"
]
}

The value for any configuration setting in platformatic.service.json can be replaced with an environment variable by adding a placeholder, for example {PLT_SERVER_LOGGER_LEVEL}. In development, environment variables are automatically loaded by your Platformatic Service app from a .env file in the root directory of your app. In production, you'll typically set these environment variables using a feature provided by your hosting provider.

See the Platformatic Service documentation for Environment variable placeholders to learn more about how this works.

Using ES modules

If you're using ES modules in the Fastify application code that you'll be migrating, ensure that there's a type field in package.json set to module:

npm pkg set type=module

Refactoring Fastify server factories

If your Fastify application has a script with a factory function to create and build up a Fastify server instance, you can refactor it into a Fastify plugin and use it in your Platformatic Service app.

Here are a few things to consider while refactoring it:

  • Move the options you're passing to Fastify when creating a new server instance to the server block in platformatic.service.json. These options will be passed through directly by Platformatic Service when it creates a Fastify server instance.
  • You can create a root plugin to be loaded by your Platformatic Service app, for example: export default async function rootPlugin (app, options) { ... }
  • When you copy the code from your factory function into your root plugin, remove the code which is creating the Fastify server instance.
  • You can configure your Platformatic Service to load the root plugin, for example:
    "plugins": {
    "paths": ["./root-plugin.js"]
    }
  • If you need to pass options to your root plugin, you can do it like this:
    "plugins": {
    "paths": [
    {
    "path": "./root-plugin.js",
    "options": {
    "someOption": true
    }
    }
    ]
    }

Migrating plugins

Copy over the plugins directory from your Fastify app. You shouldn't need to make any modifications for them to work with Platformatic Service.

Disabling plugin encapsulation

Platformatic Service provides a configuration setting which enables you to disable encapsulation for a plugin, or all the plugins within a directory. This will make any decorators or hooks that you set in those plugins available to all other plugins. This removes the need for you to wrap your plugins with fastify-plugin.

To disable encapsulation for all plugins within the plugins directory, you would set your plugins configuration like this in platformatic.service.json:

// platformatic.service.json

"plugins": {
"paths": [
{
"path": "./plugins",
"encapsulate": false
},
"./routes"
]
}

You can learn more about plugin encapsulation in the Fastify Plugins Guide.

Migrating routes

Copy over the routes directory from your Fastify app.

Explicit route paths

If you're registering routes in your Fastify application with full paths, for example /movies, you won't need to make any changes to your route plugins.

Route prefixing with file-system based routing

If you're using the prefix option when registering route plugins in your Fastify application, for example:

app.register(import('./routes/movies.js'), { prefix: '/movies' })

You can achieve the same result with Platformatic Service by using file-system based routing. With the following directory and file structure:

routes/
├── movies
│   └── index.js
└── quotes
└── index.js

Assuming that both of the route files register a / route, these are the route paths that will be registered in your Platformatic Service app:

/movies
/quotes

With the example Fastify application, this would mean copying the route files over to these places in the Platformatic Service app:

routes/movies.js -> routes/movies/index.js
routes/quotes.js -> routes/quotes/index.js

How does this work? Plugins are loaded with the @fastify/autoload Fastify plugin. The dirNameRoutePrefix plugin option for @fastify/autoload is enabled by default. This means that "routes will be automatically prefixed with the subdirectory name in an autoloaded directory".

If you'd prefer not to use file-system based routing with Platformatic Service, you can add prefixes to the paths for the routes themselves (see Explicit route paths).

Adapting existing usage of @fastify/autoload

If you're using @fastify/autoload in your Fastify application, there are a couple of approaches you can take when migrating the app to Platformatic Service:

  • Configure plugins in your Platformatic Service app's platformatic.service.json. It will then take care of loading your routes and plugins for you with @fastify/autoload (configuration documentation).
  • You can continue to use @fastify/autoload directly with a little refactoring. See the tips in the Refactoring Fastify server factories section.

Migrating tests

You'll generally use the Platformatic CLI to start your Platformatic Service app (npx platformatic start). However for testing, you can use the programmatic API provided by Platformatic Service. This allows you to load your app in your test scripts and then run tests against it.

If you copy over the tests from your existing Fastify app, they will typically only require a small amount of refactoring to work with Platformatic Service.

Replacing your Fastify server factory function

The example Fastify app has a buildApp() factory function which creates a Fastify server instance. The import line for that function can be removed from tests/routes.test.js:

// tests/routes.test.js

import { buildApp } from '../app.js'

And replaced with an import of the buildServer() function from @platformatic/service:

// tests/routes.test.js

import { buildServer } from '@platformatic/service'

You can then load your Platformatic Service app like this:


const app = await buildServer('./platformatic.service.json')

Disabling server logging in your tests

If you have logging enabled for your Platformatic Service app, you'll probably want to disable the logging in your tests to remove noise from the output that you receive when you run your tests.

Instead of passing the path to your app's configuration to buildServer(), you can import the app configuration and disable logging:

// tests/routes.test.js

import serviceConfig from '../platformatic.service.json' assert { type: 'json' }

serviceConfig.server.logger = false

Then pass that serviceConfig configuration object to the buildServer() function:

// tests/routes.test.js

const app = await buildServer(serviceConfig)

Import assertions — the assert { type: 'json' } syntax — are not a stable feature of the JavaScript language, so you'll receive warning messages from Node.js when running your tests. You can disable these warnings by passing the --no-warnings flag to node.

Building apps with Platformatic Service

Because Platformatic Service is built on top of the Fastify framework, you're able to use the full functionality of the Fastify framework in your Platformatic Service app. This includes:

  • Fast, structured logging, provided by Pino
  • Request validation with JSON Schema and Ajv (other validation libraries are supported too)
  • Hooks, which allow fine grained control over when code is run during the request/response lifecycle.
  • Decorators, which allow you to customize core Fastify objects and write more modular code.

Platformatic Service also provides many other features that are built on top of Fastify.

Application features

All Platformatic Service features are fully configurable via platformatic.service.json.

Development features

  • Hot reloading — Your server will automatically reload in development as you develop features.
  • Write your plugins in JavaScript or TypeScript — TypeScript support is provided out-of-the-box and supports hot reloading.
  • Pretty printed logs — Making it easier to understand and debug your application during development.

See the Platformatic Service Configuration documentation for all of the features which can be configured.

Next steps

The documentation for Platformatic Service is a helpful reference when building a Platformatic Service app.

Watch: Understand the parts of a Platformatic app

You want to be confident that you understand how your applications work. In this video you'll learn about the parts that make up a Platformatic application, what each part does, and how they fit together.

Our series of Platformatic How-to videos can help get you up and running building apps with Platformatic open-source tools.

Got questions or need help migrating your Fastify app to use Platformatic Service? Drop by our Discord server and we'll be happy to help you.

+ + + + \ No newline at end of file diff --git a/docs/0.41.3/guides/monitoring/index.html b/docs/0.41.3/guides/monitoring/index.html new file mode 100644 index 00000000000..e0aede7da20 --- /dev/null +++ b/docs/0.41.3/guides/monitoring/index.html @@ -0,0 +1,24 @@ + + + + + +Monitoring with Prometheus and Grafana | Platformatic Open Source Software + + + + + +
+
Version: 0.41.3

Monitoring with Prometheus and Grafana

Prometheus is an open source systems monitoring and alerting toolkit. It's a time series database that collects metrics from configured targets at given intervals, evaluates rule expressions, displays the results, and can trigger alerts if some condition is observed to be true. +Grafana is open source visualization and analytics software.

It's a pretty common solution to use Prometheus to collect and store monitoring data, and Grafana to visualize it.

Platformatic can be configured to expose Prometheus metrics:

...
"metrics": {
"port": 9091,
"auth": {
"username": "platformatic",
"password": "mysecret"
}
}
...

In this case, we are exposing the metrics on port 9091 (defaults to 9090), and we are using basic authentication to protect the endpoint. +We can also specify the IP address to bind to (defaults to 0.0.0.0). +Note that the metrics port is not the default in this configuration. This is because if you want to test the integration running both Prometheus and Platformatic on the same host, Prometheus starts on port 9090 too. +All the configuration settings are optional. To use the default settings, set "metrics": true. See the configuration reference for more details.

caution

Use environment variable placeholders in your Platformatic DB configuration file to avoid exposing credentials.

Prometheus Configuration

This is an example of a minimal Prometheus configuration to scrape the metrics from Platformatic:

global:
scrape_interval: 15s
scrape_timeout: 10s
evaluation_interval: 1m
scrape_configs:
- job_name: 'platformatic'
scrape_interval: 2s
metrics_path: /metrics
scheme: http
basic_auth:
username: platformatic
password: mysecret
static_configs:
- targets: ['192.168.69.195:9091']
labels:
group: 'platformatic'

We specify a target configuring the IP address and the port where Platformatic is running, and we specify the username and password to use for basic authentication. The metrics path is the one used by Platformatic. The IP address is not a loopback address, so this will work even with Prometheus running in Docker on the same host (see below); please change it to your host IP.

To test this configuration, we can run Prometheus locally using docker and docker-compose, so please be sure to have both correctly installed. +Save the above configuration in a file named ./prometheus/prometheus.yml and create a docker-compose.yml:

version: "3.7"

services:
prometheus:
image: prom/prometheus:latest
volumes:
- prometheus_data:/prometheus
- ./prometheus/prometheus.yml:/etc/prometheus/prometheus.yml
command:
- '--config.file=/etc/prometheus/prometheus.yml'
ports:
- '9090:9090'

volumes:
prometheus_data: {}

Then run docker-compose up -d and open http://localhost:9090 in your browser. You should see the Prometheus dashboard, and you can also query the metrics, e.g. {group="platformatic"}. See Prometheus docs for more information on querying and metrics.

Grafana Configuration

Let's see how we can configure Grafana to chart some Platformatic metrics from Prometheus. +Change the docker-compose.yml to add a grafana service:

version: "3.7"
services:

prometheus:
image: prom/prometheus:latest
volumes:
- prometheus_data:/prometheus
- ./prometheus/prometheus.yml:/etc/prometheus/prometheus.yml
command:
- '--config.file=/etc/prometheus/prometheus.yml'
ports:
- '9090:9090'

grafana:
image: grafana/grafana:latest
volumes:
- grafana_data:/var/lib/grafana
environment:
- GF_SECURITY_ADMIN_PASSWORD=pleasechangeme
depends_on:
- prometheus
ports:
- '3000:3000'

volumes:
prometheus_data: {}
grafana_data: {}

In Grafana, select Configuration -> Data Sources -> Add Data Source, and select Prometheus. +In the URL field, specify the URL of the Prometheus server, e.g. http://prometheus:9090 (the name of the service in the docker-compose file), then Save & Test.

Now we can create a dashboard and add panels to it. Select the Prometheus data source, and add queries. You should see the metrics exposed by Platformatic.

It's also possible to import pre-configured dashboards, like this one from Grafana.com.

+ + + + \ No newline at end of file diff --git a/docs/0.41.3/guides/packaging-an-application-as-a-module/index.html b/docs/0.41.3/guides/packaging-an-application-as-a-module/index.html new file mode 100644 index 00000000000..dc493b0b258 --- /dev/null +++ b/docs/0.41.3/guides/packaging-an-application-as-a-module/index.html @@ -0,0 +1,27 @@ + + + + + +Packaging a Platformatic Application as a module | Platformatic Open Source Software + + + + + +
+
Version: 0.41.3

Packaging a Platformatic Application as a module

Platformatic Service and Platformatic DB +offer a good starting point to create new applications. However, most developers or organizations might want to +create reusable services or applications built on top of Platformatic. +This is useful to publish the application on the public npm registry (or a private one!), including building your own CLI, +or to create a specialized template for your organization to allow for centralized bugfixes and updates.

This process is the same one we use to maintain Platformatic DB and Platformatic Composer on top of Platformatic Service.

Creating a custom Service

We are creating the module foo.js as follows:

const { schema, platformaticService } = require('@platformatic/service')

/** @type {import('fastify').FastifyPluginAsync<{}>} */
async function foo (app, opts) {
const text = app.platformatic.config.foo.text
app.get('/foo', async (request, reply) => {
return text
})

await platformaticService(app, opts)
}

foo.configType = 'foo'

// break Fastify encapsulation
foo[Symbol.for('skip-override')] = true

// The schema for our configuration file
foo.schema = {
$id: 'https://example.com/schemas/foo.json',
title: 'Foo Service',
type: 'object',
properties: {
server: schema.server,
plugins: schema.plugins,
metrics: schema.metrics,
watch: {
anyOf: [schema.watch, {
type: 'boolean'
}, {
type: 'string'
}]
},
$schema: {
type: 'string'
},
module: {
type: 'string'
},
foo: {
type: 'object',
properties: {
text: {
type: 'string'
}
},
required: ['text']
}
},
additionalProperties: false,
required: ['server']
}

// The configuration for the ConfigManager
foo.configManagerConfig = {
schema: foo.schema,
envWhitelist: ['PORT', 'HOSTNAME'],
allowToWatch: ['.env'],
schemaOptions: {
useDefaults: true,
coerceTypes: true,
allErrors: true,
strict: false
}
}

module.exports = foo

Note that the $id property of the schema identifies the module in our system, +allowing us to retrieve the schema correctly. +It is recommended, but not required, that the JSON schema is actually +published in this location. Doing so allows tooling such as the VSCode +language server to provide autocompletion.

In this example, the schema adds a custom top-level foo property +that users can use to configure this specific module.

ESM is also supported.

Consuming a custom application

Consuming foo.js is simple. We can create a platformatic.json file as follows:

{
"$schema": "https://example.com/schemas/foo.json",
"module": "./foo",
"server": {
"port": 0,
"hostname": "127.0.0.1"
},
"foo": {
"text": "Hello World"
}
}

Note that we must specify both the $schema property and module. +Module can also be any modules published on npm and installed via your package manager.

Building your own CLI

It is possible to build your own CLI with the following cli.mjs file:

import foo from './foo.js'
import { start } from '@platformatic/service'
import { printAndExitLoadConfigError } from '@platformatic/config'

await start(foo, process.argv.splice(2)).catch(printAndExitLoadConfigError)

This will also load platformatic.foo.json files.

+ + + + \ No newline at end of file diff --git a/docs/0.41.3/guides/prisma/index.html b/docs/0.41.3/guides/prisma/index.html new file mode 100644 index 00000000000..e44f0bcec08 --- /dev/null +++ b/docs/0.41.3/guides/prisma/index.html @@ -0,0 +1,17 @@ + + + + + +Integrate Prisma with Platformatic DB | Platformatic Open Source Software + + + + + +
+
Version: 0.41.3

Integrate Prisma with Platformatic DB

Prisma is an open-source ORM for Node.js and TypeScript. It is used as an alternative to writing SQL, or using another database access tool such as SQL query builders (like knex.js) or ORMs (like TypeORM and Sequelize). Prisma currently supports PostgreSQL, MySQL, SQL Server, SQLite, MongoDB, and CockroachDB.

Prisma can be used with JavaScript or TypeScript, and provides a level of type-safety that goes beyond the guarantees made by other ORMs in the TypeScript ecosystem. You can find an in-depth comparison of Prisma against other ORMs here.

If you want to get a quick overview of how Prisma works, you can follow the Quickstart or read the Introduction in the Prisma documentation.

How Prisma can improve your workflow with Platformatic DB

While Platformatic speeds up development of your REST and GraphQL APIs, Prisma can complement the workflow in several ways:

  1. Provides an intuitive data modeling language
  2. Provides auto-generated and customizable SQL migrations
  3. Provides type-safety and auto-completion for your database queries

You can learn more about why Prisma and Platformatic are a great match in this article.

Prerequisites

To follow along with this guide, you will need to have the following:

Setup Prisma

Install the Prisma CLI and the db-diff development dependencies in your project:

npm install --save-dev prisma @ruheni/db-diff

Next, initialize Prisma in your project

npx prisma init

This command does the following:

  • Creates a new directory called prisma which contains a file called schema.prisma. This file defines your database connection and the Prisma Client generator.
  • Creates a .env file at the root of your project if it doesn't exist. This defines your environment variables (used for your database connection).

You can specify your preferred database provider using the --datasource-provider flag, followed by the name of the provider:

npx prisma init --datasource-provider postgresql # or sqlite, mysql, sqlserver, cockroachdb

Prisma uses the DATABASE_URL environment variable to connect to your database to sync your database and Prisma schema. It also uses the variable to connect to your database to run your Prisma Client queries.

If you're using PostgreSQL, MySQL, SQL Server, or CockroachDB, ensure that the DATABASE_URL used by Prisma is the same as the one used by Platformatic DB project. If you're using SQLite, refer to the Using Prisma with SQLite section.

If you have an existing project, refer to the Adding Prisma to an existing Platformatic DB project section. If you're adding Prisma to a new project, refer to the Adding Prisma to a new project.

Adding Prisma to an existing project

If you have an existing Platformatic DB project, you can introspect your database and generate the data model in your Prisma schema with the following command:

npx prisma db pull

The command will introspect your database and generate the data model

Next, add the @@ignore attribute to the versions model to exclude it from the Prisma Client API:

model versions {
version BigInt @id
name String?
md5 String?
run_at DateTime? @db.Timestamptz(6)

+ @@ignore
}

To learn how you can evolve your database schema, you can jump to the Evolving your database schema section.

Adding Prisma to a new project

Define a Post model with the following fields at the end of your schema.prisma file:

prisma/schema.prisma
model Post {
id Int @id @default(autoincrement())
title String
content String?
published Boolean @default(false)
viewCount Int @default(0)
createdAt DateTime @default(now())

@@map("posts")
}

The snippet above defines a Post model with the following fields and properties:

  • id: An auto-incrementing integer that will be the primary key for the model.
  • title: A non-nullable String field.
  • content: A nullable String field.
  • published: A Boolean field with a default value of false.
  • viewCount: An Int field with a default value of 0.
  • createdAt: A DateTime field with a timestamp of when the value is created as its default value.

By default, Prisma maps the model name and its format to the table name — which is also used in Prisma Client. Platformatic DB uses a snake casing and pluralized table names to map your table names to the generated API. The @@map() attribute in the Prisma schema allows you to define the name and format of your table names to be used in your database. You can also use the @map() attribute to define the format for field names to be used in your database. Refer to the Foreign keys and table names naming conventions section to learn how you can automate formatting foreign keys and table names.

Next, run the following command to generate an up and down migration:

npx db-diff

The previous command will generate both an up and down migration based on your schema. The generated migration is stored in your ./migrations directory. If you are currently using a different path to store the migration, you can provide the --migrations-dir flag followed by the path.

You can then apply the generated migration using the Platformatic DB CLI:

npx platformatic db migrations apply

Platformatic uses Postgrator to run migrations. Postgrator creates a table in the database called versions to track the applied migrations. Since the versions table is not yet captured in the Prisma schema, run the following command to introspect the database and populate it with the missing model:

npx prisma db pull

Introspecting the database to populate the model prevents including the versions table in the generated down migrations.

Your Prisma schema should now contain a versions model that is similar to this one (it will vary depending on the database system you're using):

model Post {
id Int @id @default(autoincrement())
title String
content String?
published Boolean @default(false)
viewCount Int @default(0)
createdAt DateTime @default(now())

@@map("posts")
}

+model versions {
+ version BigInt @id
+ name String?
+ md5 String?
+ run_at DateTime? @db.Timestamptz(6)
+}

Add the @@ignore attribute function to the model to exclude it from the Prisma Client API:

model versions {
version BigInt @id
name String?
md5 String?
run_at DateTime? @db.Timestamptz(6)

+ @@ignore
}

Evolving your database schema

Update the data model in your Prisma schema by adding a model or a field:

// based on the schema in the "Adding Prisma to a new project" section
+model User {
+ id Int @id @default(autoincrement())
+ email String @unique
+ name String?
+ posts Post[]
+
+ @@map("users")
+}

model Post {
id Int @id @default(autoincrement())
createdAt DateTime @default(now())
title String
content String?
published Boolean @default(false)
viewCount Int @default(0)
+ author User? @relation(fields: [authorId], references: [id])
+ authorId Int? @map("author_id")

@@map("posts")
}

Next, use the @ruheni/db-diff CLI tool to generate up and down migrations:

npx db-diff

This command will generate up and down migrations based off of your Prisma schema. If you are currently using a different path to store the migration, you can provide the --migrations-dir flag followed by the path.

Next, apply the generated migration using the Platformatic CLI:

npx platformatic db migrations apply

And you're done!

Using Prisma Client in your plugins

Plugins allow you to add custom functionality to your REST and GraphQL API. Refer to the Add Custom Functionality guide to learn more about how you can add custom functionality.

danger

Prisma Client usage with Platformatic is currently only supported in Node v18

You can use Prisma Client to interact with your database in your plugin.

To get started, run the following command:

npx prisma generate

The above command installs the @prisma/client in your project and generates a Prisma Client based off of your Prisma schema.

Install @sabinthedev/fastify-prisma fastify plugin. The plugin takes care of shutting down database connections and makes Prisma Client available as a Fastify plugin.

npm install @sabinthedev/fastify-prisma

Register the plugin and extend your REST API:

// 1.
const prismaPlugin = require("@sabinthedev/fastify-prisma")

module.exports = async (app) => {
app.log.info('plugin loaded')

// 2.
app.register(prismaPlugin)

/**
* Plugin logic
*/
// 3.
app.put('/post/:id/views', async (req, reply) => {

const { id } = req.params

// 4.
const post = await app.prisma.post.update({
where: {
id: Number(id)
},
data: {
viewCount: {
increment: 1
}
}
})

// 5.
return reply.send(post)
})
}

The snippet does the following:

  1. Imports the plugin
  2. Registers the @sabinthedev/fastify-prisma
  3. Defines the endpoint for incrementing the views of a post
  4. Makes a query to the database on the Post model to increment a post's view count
  5. Returns the updated post on success

If you would like to extend your GraphQL API, extend the schema and define the corresponding resolver:

plugin.js
// ./plugin.js
const prismaPlugin = require("@sabinthedev/fastify-prisma")

module.exports = async (app) => {
app.log.info('plugin loaded')

app.graphql.extendSchema(`
extend type Mutation {
incrementPostViewCount(id: ID): Post
}
`)

app.graphql.defineResolvers({
Mutation: {
incrementPostViewCount: async (_, { id }) => {
const post = await prisma.post.update({
where: {
id: Number(id)
},
data: {
viewCount: {
increment: 1
}
}
})

if (!post) throw new Error(`Post with id:${id} was not found`)
return post
}
}
})
}

Start the server:

npx platformatic db start

The query should now be included in your GraphQL schema.

You can also use the Prisma Client in your REST API endpoints.

Workarounds

Using Prisma with SQLite

Currently, Prisma doesn't resolve the file path of a SQLite database the same way as Platformatic does.

If your database is at the root of the project, create a new environment variable that Prisma will use called PRISMA_DATABASE_URL:

# .env
DATABASE_URL="sqlite://db.sqlite"
PRISMA_DATABASE_URL="file:../db.sqlite"

Next, update the url value in the datasource block in your Prisma schema with the updated value:

prisma/schema.prisma
// ./prisma/schema.prisma
datasource db {
provider = "sqlite"
url = env("PRISMA_DATABASE_URL")
}

Running migrations should now work smoothly and the path will be resolved correctly.

Foreign keys, field, and table names naming conventions

Foreign key names should use underscores, e.g. author_id, for Platformatic DB to correctly map relations. You can use the @map("") attribute to define the names of your foreign keys and field names to be defined in the database.

Table names should be mapped to use the naming convention expected by Platformatic DB e.g. @@map("recipes") (the Prisma convention is Recipe, which corresponds with the model name).

You can use prisma-case-format to enforce your own database conventions, i.e., pascal, camel, and snake casing.

Learn more

If you would like to learn more about Prisma, be sure to check out the Prisma docs.

+ + + + \ No newline at end of file diff --git a/docs/0.41.3/guides/securing-platformatic-db/index.html b/docs/0.41.3/guides/securing-platformatic-db/index.html new file mode 100644 index 00000000000..6efa940c9d4 --- /dev/null +++ b/docs/0.41.3/guides/securing-platformatic-db/index.html @@ -0,0 +1,31 @@ + + + + + +Securing Platformatic DB with Authorization | Platformatic Open Source Software + + + + + +
+
Version: 0.41.3

Securing Platformatic DB with Authorization

Introduction

Authorization in Platformatic DB is role-based. User authentication and the +assignment of roles must be handled by an external authentication service. +Take a look at the reference documentation for Authorization.

The goal of this simple guide is to protect an API built with Platformatic DB +with the use of a shared secret, that we call adminSecret. We want to prevent +any user that is not an admin to access the data.

The use of an adminSecret is a simplistic way of securing a system. +It is a crude way for limiting access and not suitable for production systems, +as the risk of leaking the secret is high in case of a security breach. +A production friendly way would be to issue a machine-to-machine JSON Web Token, +ideally with an asymmetric key. Alternatively, you can defer to an external +service via a Web Hook.

Please refer to our guide to set up Auth0 for more information +on JSON Web Tokens.

Block access to all entities, allow admins

The following configuration will block all anonymous users (e.g. each user without a known role) +to access every entity:

{
...
"authorization": {
"adminSecret": "replaceWithSomethingRandomAndSecure"
}
}

The data will still be available if the X-PLATFORMATIC-ADMIN-SECRET HTTP header +is specified when making HTTP calls, like so:

curl -H 'X-PLATFORMATIC-ADMIN-SECRET: replaceWithSomethingRandomAndSecure' http://127.0.0.1:3042/pages
info

Configuring JWT or Web Hooks will have the same result of configuring an admin secret.

Authorization rules

Rules can be provided based on entity and role in order to restrict access and provide fine grained access. +To make an admin only query and save the page table / page entity using adminSecret this structure should be used in the platformatic.db configuration file:

  ...
"authorization": {
"adminSecret": "easy",
"rules": [{
"entity": "movie"
"role": "platformatic-admin",
"find": true,
"save": true,
"delete": false,
}
]
}
info

Note that the role of an admin user from adminSecret strategy is platformatic-admin by default.

Read-only access to anonymous users

The following configuration will allow all anonymous users (e.g. each user without a known role) +to access the pages table / page entity in Read-only mode:

{
...
"authorization": {
"adminSecret": "replaceWithSomethingRandomAndSecure"
"rules": [{
"role": "anonymous",
"entity": "page",
"find": true,
"save": false,
"delete": false
}]
}
}

Note that we set find as true to allow the access, while the other options are false.

Work in Progress

This guide is a Work-In-Progress. Let us know what other common authorization use cases we should cover.

+ + + + \ No newline at end of file diff --git a/docs/0.41.3/guides/seed-a-database/index.html b/docs/0.41.3/guides/seed-a-database/index.html new file mode 100644 index 00000000000..238bb2aad8a --- /dev/null +++ b/docs/0.41.3/guides/seed-a-database/index.html @@ -0,0 +1,21 @@ + + + + + +Seed a Database | Platformatic Open Source Software + + + + + +
+
Version: 0.41.3

Seed a Database

A database is as useful as the data that it contains: a fresh, empty database +isn't always the best starting point. We can add a few rows from our migrations +using SQL, but we might need to use JavaScript from time to time.

The platformatic db seed command allows us to run a +script that will populate — or "seed" — our database.

Example

Our seed script should export a Function that accepts an argument: +an instance of @platformatic/sql-mapper.

seed.js
'use strict'

module.exports = async function ({ entities, db, sql }) {
await entities.graph.save({ input: { name: 'Hello' } })
await db.query(sql`
INSERT INTO graphs (name) VALUES ('Hello 2');
`)
}

We can then run the seed script with the Platformatic CLI:

npx platformatic db seed seed.js
+ + + + \ No newline at end of file diff --git a/docs/0.41.3/guides/telemetry/index.html b/docs/0.41.3/guides/telemetry/index.html new file mode 100644 index 00000000000..3a1a641e76e --- /dev/null +++ b/docs/0.41.3/guides/telemetry/index.html @@ -0,0 +1,21 @@ + + + + + +Telemetry with Jaeger | Platformatic Open Source Software + + + + + +
+
Version: 0.41.3

Telemetry with Jaeger

Introduction

Platformatic supports Open Telemetry integration. This allows you to send telemetry data to one of the OTLP compatible servers (see here) or to a Zipkin server. Let's show this with Jaeger.

Jaeger setup

The quickest way is to use docker:

docker run -d --name jaeger \
-e COLLECTOR_OTLP_ENABLED=true \
-p 16686:16686 \
-p 4317:4317 \
-p 4318:4318 \
jaegertracing/all-in-one:latest

Check that the server is running by opening http://localhost:16686/ in your browser.

Platformatic setup

We will test this with a Platformatic Composer that proxies requests to a Platformatic Service, which in turn invokes a Platformatic DB Service. +In this way we show that the telemetry is propagated from the Composer throughout the services and collected correctly. +Let's set up all these components:

Platformatic DB Service

Create a folder for DB and cd into it:

mkdir test-db
cd test-db

Then create a db in the folder using npx create-platformatic@latest:

npx create-platformatic@latest

To make it simple, use sqlite and create/apply the default migrations. This DB Service is exposed on port 5042:


➜ npx create-platformatic@latest

Hello user, welcome to Platformatic 0.32.0!
Let's start by creating a new project.
? Which kind of project do you want to create? DB
? Where would you like to create your project? .
? What database do you want to use? SQLite
? Do you want to use the connection string "sqlite://./db.sqlite"? Confirm
? Do you want to create default migrations? yes
? Do you want to create a plugin? no
? Do you want to use TypeScript? no
? What port do you want to use? 5042
[15:40:46] INFO: Configuration file platformatic.db.json successfully created.
[15:40:46] INFO: Environment file .env successfully created.
[15:40:46] INFO: Migrations folder migrations successfully created.
[15:40:46] INFO: Migration file 001.do.sql successfully created.
[15:40:46] INFO: Migration file 001.undo.sql successfully created.
[15:40:46] INFO: Plugin file created at plugin.js
? Do you want to run npm install? no
? Do you want to apply migrations? yes
...done!
? Do you want to generate types? no
? Do you want to create the github action to deploy this application to Platformatic Cloud dynamic workspace? no
? Do you want to create the github action to deploy this application to Platformatic Cloud static workspace? no

All done! Please open the project directory and check the README.
Will test this in one example with a Platformatic Composer that proxies requests to a Platformatic Service, which in turn invokes a Platformatic DB.

Open the platformatic.db.json file and add the telemetry configuration:

  "telemetry": {
"serviceName": "test-db",
"exporter": {
"type": "otlp",
"options": {
"url": "http://localhost:4318/v1/traces"
}
}
}

Finally, start the DB service:

npx platformatic db start

Platformatic Service

Create at the same level of test-db another folder for Service and cd into it:

mkdir test-service
cd test-service

Then create a service on the 5043 port in the folder using npx create-platformatic@latest:

➜ npx create-platformatic@latest

Hello user, welcome to Platformatic 0.32.0!
Let's start by creating a new project.
? Which kind of project do you want to create? Service
? Where would you like to create your project? .
? Do you want to run npm install? no
? Do you want to use TypeScript? no
? What port do you want to use? 5043
[15:55:35] INFO: Configuration file platformatic.service.json successfully created.
[15:55:35] INFO: Environment file .env successfully created.
[15:55:35] INFO: Plugins folder "plugins" successfully created.
[15:55:35] INFO: Routes folder "routes" successfully created.
? Do you want to create the github action to deploy this application to Platformatic Cloud dynamic workspace? no
? Do you want to create the github action to deploy this application to Platformatic Cloud static workspace? no

Open the platformatic.service.json file and add the following telemetry configuration (it's exactly the same as DB, but with a different serviceName)

  "telemetry": {
"serviceName": "test-service",
"exporter": {
"type": "otlp",
"options": {
"url": "http://localhost:4318/v1/traces"
}
}
}

We want this service to invoke the DB service, so we need to add a client for test-db to it:

npx platformatic client http://127.0.0.1:5042 js --name movies

Check platformatic.service.json to see that the client has been added (PLT_MOVIES_URL is defined in .env):

    "clients": [
{
"schema": "movies/movies.openapi.json",
"name": "movies",
"type": "openapi",
"url": "{PLT_MOVIES_URL}"
}
]

Now open routes/root.js and add the following:

  fastify.get('/movies-length', async (request, reply) => {
const movies = await request.movies.getMovies()
return { length: movies.length }
})

This code calls movies to get all the movies and returns the length of the array.

Finally, start the service:

npx platformatic service start

Platformatic Composer

At the same level as test-db and test-service, create another folder for Composer and cd into it:

mkdir test-composer
cd test-composer

Then create a composer on the 5044 port in the folder using npx create-platformatic@latest:

➜ npx create-platformatic@latest

Hello marcopiraccini, welcome to Platformatic 0.32.0!
Let's start by creating a new project.
? Which kind of project do you want to create? Composer
? Where would you like to create your project? .
? What port do you want to use? 5044
? Do you want to run npm install? no
[16:05:28] INFO: Configuration file platformatic.composer.json successfully created.
[16:05:28] INFO: Environment file .env successfully created.
? Do you want to create the github action to deploy this application to Platformatic Cloud dynamic workspace? no
? Do you want to create the github action to deploy this application to Platformatic Cloud static workspace? no

All done! Please open the project directory and check the README.

Open platformatic.composer.json and change it to the following:

{
"$schema": "https://platformatic.dev/schemas/v0.32.0/composer",
"server": {
"hostname": "{PLT_SERVER_HOSTNAME}",
"port": "{PORT}",
"logger": {
"level": "{PLT_SERVER_LOGGER_LEVEL}"
}
},
"composer": {
"services": [
{
"id": "example",
"origin": "http://127.0.0.1:5043",
"openapi": {
"url": "/documentation/json"
}
}
],
"refreshTimeout": 3000
},
"telemetry": {
"serviceName": "test-composer",
"exporter": {
"type": "otlp",
"options": {
"url": "http://localhost:4318/v1/traces"
}
}
},
"watch": true
}

Note that we just added test-service as the origin of the proxied service and added the usual telemetry configuration, with a different serviceName.

Finally, start the composer:

npx platformatic composer start

Run the Test

Check that the composer is exposing movies-length by opening: http://127.0.0.1:5044/documentation/

You should see: image

To add some data, we can POST directly to the DB service (port 5042):

curl -X POST -H "Content-Type: application/json" -d '{"title":"The Matrix"}' http://127.0.0.1:5042/movies 
curl -X POST -H "Content-Type: application/json" -d '{"title":"The Matrix Reloaded"}' http://127.0.0.1:5042/movies

Now, let's check that the composer (port 5044) is working:

curl http://127.0.0.1:5044/movies-length

If the composer is working correctly, you should see:

{"length":2}

However, the main interest of this example is to show how to use the Platformatic Telemetry, so let's check it. Open the Jaeger UI at http://localhost:16686/ and you should see something like this:

image

Select on the left the test-composer service and the GET /movies-length operation, click on "Find traces" and you should see something like this:

image

You can then click on the trace and see the details:

image

Note that every time a request is received or a client call is made, a new span is started. So we have:

  • One span for the request received by the test-composer
  • One span for the client call to test-service
  • One span for the request received by test-service
  • One span for the client call to test-db
  • One span for the request received by test-db

All these spans are linked together, so you can see the whole trace.

What if you want to use Zipkin?

Starting from this example, it's also possible to run the same test using Zipkin. To do so, you need to start the Zipkin server:

docker run -d -p 9411:9411 openzipkin/zipkin

Then, you need to change the telemetry configuration in all the platformatic.*.json to the following (only the exporter object is different)

  "telemetry": {
(...)
"exporter": {
"type": "zipkin",
"options": {
"url": "http://127.0.0.1:9411/api/v2/spans"
}
}
}

The zipkin ui is available at http://localhost:9411/

+ + + + \ No newline at end of file diff --git a/docs/0.41.3/platformatic-cloud/deploy-database-neon/index.html b/docs/0.41.3/platformatic-cloud/deploy-database-neon/index.html new file mode 100644 index 00000000000..0747e5724d8 --- /dev/null +++ b/docs/0.41.3/platformatic-cloud/deploy-database-neon/index.html @@ -0,0 +1,32 @@ + + + + + +Deploy a PostgreSQL database with Neon | Platformatic Open Source Software + + + + + +
+
Version: 0.41.3

Deploy a PostgreSQL database with Neon

Neon offers multi-cloud fully managed +Postgres with a generous free tier. They separated storage and +compute to offer autoscaling, branching, and bottomless storage. +It offers a great environment for creating database preview +environments for your Platformatic DB +applications.

This guide shows you how to integrate Neon branch deployments with your +Platformatic app's GitHub Actions workflows. It assumes you have already +followed the Quick Start Guide.

Create a project on Neon

To set up an account with Neon, open their website, sign up and create a +new project.

Take note of the following configuration setting values:

  • The connection string for your main branch database, to be stored in a NEON_DB_URL_PRODUCTION secret
  • The Project ID (available under the project Settings), to be stored in a NEON_PROJECT_ID secret
  • Your API key (available by clicking on your user icon > Account > Developer settings), to be stored under NEON_API_KEY

You can learn more about Neon API keys in their Manage API Keys documentation.

Configure Github Environments and Secrets

Now you need to set the configuration values listed above as +repository secrets +on your project's GitHub repository. +Learn how to use environments for deployment in GitHub's documentation.

Configure the GitHub Environments for your repository to have:

  • production secrets, available only to the main branch:
    • NEON_DB_URL_PRODUCTION
  • previews secrets available to all branches:
    • NEON_PROJECT_ID
    • NEON_API_KEY

Configure the main branch workflow

Replace the contents of your app's workflow for static workspace deployment:

.github/workflows/platformatic-static-workspace-deploy.yml
name: Deploy Platformatic application to the cloud
on:
push:
branches:
- main
paths-ignore:
- 'docs/**'
- '**.md'

jobs:
build_and_deploy:
environment:
name: production
permissions:
contents: read
runs-on: ubuntu-latest
steps:
- name: Checkout application project repository
uses: actions/checkout@v4
- name: npm install --omit=dev
run: npm install --omit=dev
- name: Deploy project
uses: platformatic/onestep@latest
with:
github_token: ${{ secrets.GITHUB_TOKEN }}
platformatic_workspace_id: <YOUR_STATIC_WORKSPACE_ID>
platformatic_workspace_key: ${{ secrets.PLATFORMATIC_STATIC_WORKSPACE_API_KEY }}
platformatic_config_path: ./platformatic.db.json
secrets: DATABASE_URL
env:
DATABASE_URL: ${{ secrets.NEON_DB_URL_PRODUCTION }}
PLT_SERVER_LOGGER_LEVEL: info
PORT: 3042
PLT_SERVER_HOSTNAME: 127.0.0.1

Replace <YOUR_STATIC_WORKSPACE_ID> with the workspace ID that you previously had in this file.

When your app is deployed to the static workspace it will now be configured to connect to the +main branch database for your Neon project.

Configure the preview environment workflow

Neon allows up to 10 database branches on their free tier. You can automatically create a new +database branch when a pull request is opened, and then automatically remove it when the pull +request is merged.

GitHub Action to create a preview environment

Replace the contents of your app's workflow for dynamic workspace deployment:

.github/workflows/platformatic-dynamic-workspace-deploy.yml
name: Deploy to Platformatic cloud
on:
pull_request:
paths-ignore:
- 'docs/**'
- '**.md'

# This allows a subsequently queued workflow run to interrupt previous runs
concurrency:
group: "${{ github.workflow }} @ ${{ github.event.pull_request.head.label || github.head_ref || github.ref }}"
cancel-in-progress: true

jobs:
build_and_deploy:
runs-on: ubuntu-latest
environment:
name: development
steps:
- name: Checkout application project repository
uses: actions/checkout@v4
- name: npm install --omit=dev
run: npm install --omit=dev
- name: Get PR number
id: get_pull_number
run: |
pull_number=$(jq --raw-output .pull_request.number "$GITHUB_EVENT_PATH")
echo "pull_number=${pull_number}" >> $GITHUB_OUTPUT
echo $pull_number
- uses: neondatabase/create-branch-action@v4
with:
project_id: ${{ secrets.NEON_PROJECT_ID }}
branch_name: pr-${{ steps.get_pull_number.outputs.pull_number }}
api_key: ${{ secrets.NEON_API_KEY }}
id: create-branch
- name: Deploy project
uses: platformatic/onestep@latest
with:
github_token: ${{ secrets.GITHUB_TOKEN }}
platformatic_workspace_id: ${{ secrets.PLATFORMATIC_DYNAMIC_WORKSPACE_ID }}
platformatic_workspace_key: ${{ secrets.PLATFORMATIC_DYNAMIC_WORKSPACE_KEY }}
platformatic_config_path: ./platformatic.db.json
env:
DATABASE_URL: ${{ steps.create-branch.outputs.db_url }}
PLT_SERVER_LOGGER_LEVEL: info
PORT: 3042
PLT_SERVER_HOSTNAME: 127.0.0.1

The dynamic workspace ID and API key are read from the PLATFORMATIC_DYNAMIC_WORKSPACE_ID and PLATFORMATIC_DYNAMIC_WORKSPACE_KEY repository secrets that you configured earlier.

Configure preview environment cleanup

After a pull request to the main branch is merged, you should remove the matching database branch.

Create a new file, .github/workflows/cleanup-neon-branch-db.yml, and copy and paste in the following +workflow configuration:

.github/workflows/cleanup-neon-branch-db.yml
name: Cleanup Neon Database Branch
on:
push:
branches:
- 'main'
jobs:
delete-branch:
environment:
name: development
permissions: write-all
runs-on: ubuntu-latest
steps:
- name: Get PR info
id: get-pr-info
uses: actions-ecosystem/action-get-merged-pull-request@v1.0.1
with:
github_token: ${{secrets.GITHUB_TOKEN}}
- run: |
echo ${{ steps.get-pr-info.outputs.number}}
- name: Delete Neon Branch
if: ${{ steps.get-pr-info.outputs.number }}
uses: neondatabase/delete-branch-action@v3
with:
project_id: ${{ secrets.NEON_PROJECT_ID }}
branch: pr-${{ steps.get-pr-info.outputs.number }}
api_key: ${{ secrets.NEON_API_KEY }}

Deployment

To deploy these changes to your app:

  1. Create a Git branch locally (git checkout -b <BRANCH_NAME>)
  2. Commit your changes and push them to GitHub
  3. Open a pull request on GitHub - a branch will automatically be created for your Neon database and a preview app will be deployed to Platformatic Cloud (in your app's dynamic workspace).
  4. Merge the pull request - the Neon database branch will be automatically deleted and your app will be deployed to Platformatic Cloud (in your app's static workspace).
+ + + + \ No newline at end of file diff --git a/docs/0.41.3/platformatic-cloud/pricing/index.html b/docs/0.41.3/platformatic-cloud/pricing/index.html new file mode 100644 index 00000000000..dd917a1641a --- /dev/null +++ b/docs/0.41.3/platformatic-cloud/pricing/index.html @@ -0,0 +1,23 @@ + + + + + +Platformatic Cloud Pricing | Platformatic Open Source Software + + + + + +
+
Version: 0.41.3

Platformatic Cloud Pricing

Find the plan that works best for you!

FreeBasicAdvancedPro
Pricing$0$4.99$22.45$49.99
Slots01512
CNAME-truetruetrue
Always On-truetruetrue

FAQ

What is a slot?

One slot is equal to one compute unit. The free plan has no always-on +machines and they will be stopped while not in use.

What is a workspace?

A workspace is the security boundary of your deployment. You will use +the same credentials to deploy to one.

A workspace can be either static or dynamic. A static workspace always deploys to the same domain, while in a dynamic workspace each deployment will have its own domain. The latter are useful to provide pull request previews.

Can I change or upgrade my plan after I start using Platformatic?

Plans can be changed or upgraded at any time.

What does it mean I can set my own CNAME?

Free applications only get a *.deploy.space domain name to access their application. All other plans can set it to a domain of their choosing.

+ + + + \ No newline at end of file diff --git a/docs/0.41.3/platformatic-cloud/quick-start-guide/index.html b/docs/0.41.3/platformatic-cloud/quick-start-guide/index.html new file mode 100644 index 00000000000..b9a724adea0 --- /dev/null +++ b/docs/0.41.3/platformatic-cloud/quick-start-guide/index.html @@ -0,0 +1,45 @@ + + + + + +Cloud Quick Start Guide | Platformatic Open Source Software + + + + + +
+
Version: 0.41.3

Cloud Quick Start Guide

This guide shows you how to create and deploy an application to +Platformatic Cloud.

Prerequisites

To follow along with this guide you'll need to have these things installed:

You will also need to have a GitHub account.

Log in to Platformatic Cloud

Go to the Platformatic Cloud website and click on the +Continue with GitHub button. You'll be transferred to a GitHub page that +asks you to Authorize Platformatic Cloud. To continue, click on the +Authorize platformatic button.

Screenshot of Continue with GitHub button

On the Platformatic Cloud Service Agreements page, check the boxes and +click the Continue button. You'll then be redirected to your Cloud Dashboard page.

Create a Cloud app

Screenshot of an empty Apps page

Click the Create an app now button on your Cloud Dashboard page.

Enter quick-start-app as your application name. Click the Create Application button.

Create a static app workspace

Enter production as the name for your workspace. Then click on the Create Workspace button.

On the next page you'll see the Workspace ID and API key for your app workspace. +Copy them and store them somewhere secure for future reference, for example in a password manager app. +The API key will be used to deploy your app to the workspace that you've just created.

Click on the Back to dashboard button.

Create a dynamic app workspace

On your Cloud Dashboard, click on your app, then click on Create Workspace in the Workspaces +sidebar.

Screenshot of the create app workspace screen

The Dynamic Workspace option will be automatically enabled as you have already created a +static workspace. Dynamic workspaces can be used to deploy preview applications for GitHub +pull requests.

Enter development as the name for your workspace, then click on the Create Workspace button. +Copy the Workspace ID and API key and store them somewhere secure.

Create a GitHub repository

Go to the Create a new repository page on GitHub. +Enter quick-start-app as the Repository name for your new repository. +Click on the Add a README file checkbox and click the Create repository +button.

Add the workspace API keys as repository secrets

Go to the Settings tab on your app's GitHub repository. Click into the +Secrets and variables > Actions section and add the following secrets:

NameSecret
PLATFORMATIC_STATIC_WORKSPACE_IDYour app's static workspace ID
PLATFORMATIC_STATIC_WORKSPACE_API_KEYYour app's static workspace API key
PLATFORMATIC_DYNAMIC_WORKSPACE_IDYour app's dynamic workspace ID
PLATFORMATIC_DYNAMIC_WORKSPACE_API_KEYYour app's dynamic workspace API key

Click on the New repository secret button to add a secret.

tip

You can also use the GitHub CLI to set secrets on your GitHub repository, for example:

gh secret set \
--app actions \
--env-file <FILENAME_OF_ENV_FILE_WITH_SECRETS> \
--repos <YOUR_GITHUB_USERNAME>/<REPO_NAME>

Create a new Platformatic app

In your terminal, use Git to clone your repository from GitHub. For example:

git clone git@github.com:username/quick-start-app.git
tip

See the GitHub documentation for help with +Cloning a repository.

Now change into the project directory:

cd quick-start-app

Now run this command to start the Platformatic creator wizard:

npm create platformatic@latest

This interactive command-line tool will ask you some questions about how you'd +like to set up your new Platformatic app. For this guide, select these options:

- Which kind of project do you want to create?     => DB
- Where would you like to create your project? => .
- Do you want to create default migrations? => yes
- Do you want to create a plugin? => yes
- Do you want to use TypeScript? => no
- Do you want to overwrite the existing README.md? => yes
- Do you want to run npm install? => yes (this can take a while)
- Do you want to apply the migrations? => yes
- Do you want to generate types? => yes
- Do you want to create the github action to deploy this application to Platformatic Cloud dynamic workspace? => yes
- Do you want to create the github action to deploy this application to Platformatic Cloud static workspace? => yes

Copy and paste your dynamic and static workspace IDs when prompted by the creator wizard.

Once the wizard is complete, you'll have a Platformatic app project in the +quick-start-app directory, with example migration files and a plugin script.

Deploy the app

In your project directory, commit your application with Git:

git add .

git commit -m "Add Platformatic app"

Now push your changes up to GitHub:

git push origin main

On the GitHub repository page in your browser click on the Actions tab. +You should now see the Platformatic Cloud deployment workflow running.

Test the deployed app

Screenshot of a static app workspace that has had an app deployed to it

Once the GitHub Actions deployment workflow has completed, go to the production workspace +for your app in Platformatic Cloud. Click on the link for the Entry Point. You should now +see the Platformatic DB app home page.

Click on the OpenAPI Documentation link to try out your app's REST API using the Swagger UI.

Screenshot of Swagger UI for a Platformatic DB app

Preview pull request changes

When a pull request is opened on your project's GitHub repository, a preview app will automatically +be deployed to your app's dynamic workspace.

To see a preview app in action, create a new Git branch:

git checkout -b add-hello-endpoint

Then open up your app's plugin.js file in your code editor. Add the following code inside +the existing empty function:

app.get('/hello', async function(request, reply) {
return { hello: 'from Platformatic Cloud' }
})

Save the changes, then commit and push them up to GitHub:

git add plugin.js

git commit -m "Add hello endpoint"

git push -u origin add-hello-endpoint

Now create a pull request for your changes on GitHub. At the bottom of the +pull request page you'll see that a deployment has been triggered to your +app's dynamic workspace.

Screenshot of checks on a GitHub pull request

Once the deployment has completed, a comment will appear on your pull request +with a link to the preview app.

Screenshot of a deployed preview app comment on a GitHub pull request

Click on the Application URL link. If you add /hello on to the URL, +you should receive a response from the endpoint that you just added to +your application.

Screenshot of a JSON response from an API endpoint

+ + + + \ No newline at end of file diff --git a/docs/0.41.3/reference/cli/index.html b/docs/0.41.3/reference/cli/index.html new file mode 100644 index 00000000000..21770a1f13e --- /dev/null +++ b/docs/0.41.3/reference/cli/index.html @@ -0,0 +1,44 @@ + + + + + +Platformatic CLI | Platformatic Open Source Software + + + + + +
+
Version: 0.41.3

Platformatic CLI

Installation and usage

Install the Platformatic CLI as a dependency for your project:

npm install platformatic

Once it's installed you can run it with:

npx platformatic
info

The platformatic package can be installed globally, but installing it as a +project dependency ensures that everyone working on the project is using the +same version of the Platformatic CLI.

Commands

The Platformatic CLI provides the following commands:

help

Welcome to Platformatic. Available commands are:

  • help - display this message.
  • help <command> - show more information about a command.
  • db - start Platformatic DB; type platformatic db help to know more.
  • service - start Platformatic Service; type platformatic service help to know more.
  • upgrade - upgrade the Platformatic configuration to the latest version.
  • gh - create a new gh action for Platformatic deployments.
  • deploy - deploy a Platformatic application to the cloud.
  • runtime - start Platformatic Runtime; type platformatic runtime help to know more.
  • start - start a Platformatic application.
  • frontend - create frontend code to consume the REST APIs.

compile

Compile all typescript plugins.

  $ platformatic compile

This command will compile the TypeScript plugins for each platformatic application.

deploy

Deploys an application to the Platformatic Cloud.

 $ platformatic deploy

Options:

  • -t, --type static/dynamic - The type of the workspace.
  • -c, --config FILE - Specify a configuration file to use.
  • -k, --keys FILE - Specify a path to the workspace keys file.
  • -l --label TEXT - The deploy label. Only for dynamic workspaces.
  • -e --env FILE - The environment file to use. Default: ".env"
  • -s --secrets FILE - The secrets file to use. Default: ".secrets.env"
  • --workspace-id uuid - The workspace id where the application will be deployed.
  • --workspace-key TEXT - The workspace key where the application will be deployed.
  1. To deploy a Platformatic application to the cloud, you should go to the Platformatic cloud dashboard and create a workspace.
  2. Once you have created a workspace, retrieve your workspace id and key from the workspace settings page. Optionally, you can download the provided workspace env file, which you can use with the --keys option.

ℹ️

When deploying an application to a dynamic workspace, specify the deploy --label option. You can find it on your cloud dashboard or you can specify a new one.

gh

Creates a gh action to deploy platformatic services on workspaces.

 $ platformatic gh -t dynamic

Options:

  • -w --workspace ID - The workspace ID where the service will be deployed.
  • -t, --type static/dynamic - The type of the workspace. Defaults to static.
  • -c, --config FILE - Specify a configuration file to use.
  • -b, --build - Build the service before deploying (npm run build).

If not specified, the configuration will be loaded from any of the following, in the current directory.

  • platformatic.db.json, or
  • platformatic.db.yml, or
  • platformatic.db.tml, or
  • platformatic.service.json, or
  • platformatic.service.yml, or
  • platformatic.service.tml

You can find more details about the configuration format here:

start

Start a Platformatic application with the following command:

$ platformatic start

Options:

  • -c, --config <path> - Path to the configuration file.
  • --inspect[=[host:]port] - Start the Node.js debugger. host defaults to '127.0.0.1'. port defaults to 9229. Use caution when binding to a public host:port combination.
  • --inspect-brk[=[host:]port] - Start the Node.js debugger and block until a client has attached. host defaults to '127.0.0.1'. port defaults to 9229. Use caution when binding to a public host:port combination.

upgrade

Upgrade the Platformatic schema configuration to the latest version.

 $ platformatic upgrade

Options:

  • -c, --config FILE - Specify a schema configuration file to use.

If not specified, the configuration will be loaded from any of the following, in the current directory.

  • platformatic.db.json, or
  • platformatic.db.yml, or
  • platformatic.db.tml, or
  • platformatic.service.json, or
  • platformatic.service.yml, or
  • platformatic.service.tml

You can find more details about the configuration format here:

client

platformatic client <command>

help

Create a Fastify plugin that exposes a client for a remote OpenAPI or GraphQL API.

To create a client for a remote OpenAPI API, you can use the following command:

$ platformatic client http://example.com/to/schema/file -n myclient

To create a client for a remote Graphql API, you can use the following command:

$ platformatic client http://example.com/graphql -n myclient

Instead of a URL, you can also use a local file:

$ platformatic client path/to/schema -n myclient

This will create a Fastify plugin that exposes a client for the remote API in a folder myclient +and a file named myclient.js inside it.

If platformatic config file is specified, it will be edited and a clients section will be added. +Then, in any part of your Platformatic application you can use the client.

You can use the client in your application in Javascript, calling a GraphQL endpoint:

module.exports = async function (app, opts) {
app.post('/', async (request, reply) => {
const res = await app.myclient.graphql({
query: 'query { hello }'
})
return res
})
}

or in Typescript, calling an OpenAPI endpoint:

import { FastifyInstance } from 'fastify'
/// <reference path="./myclient" />

export default async function (app: FastifyInstance) {
app.get('/', async () => {
return app.myclient.get({})
})
}

Options:

  • -c, --config <path> - Path to the configuration file.
  • -n, --name <name> - Name of the client.
  • -f, --folder <name> - Name of the plugin folder, defaults to --name value.
  • -t, --typescript - Generate the client plugin in TypeScript.
  • --full-response - Client will return full response object rather than just the body.
  • --full-request - Client will be called with all parameters wrapped in body, headers and query properties.
  • --full - Enables both --full-request and --full-response overriding them.
  • --optional-headers <headers> - Comma separated string of headers that will be marked as optional in the type file

composer

platformatic composer <command>

help

Available commands:

  • help - show this help message.
  • help <command> - shows more information about a command.
  • start - start the server.
  • openapi schemas fetch - fetch OpenAPI schemas from services.

openapi schemas fetch

Fetch OpenAPI schemas from remote services to use in your Platformatic project.

  $ platformatic composer openapi schemas fetch

It will fetch all the schemas from the remote services and store them by path +set in the platformatic.composer.json file. If the path is not set, it will +skip fetching the schema.

start

Start the Platformatic Composer server with the following command:

 $ platformatic composer start

You will need a configuration file. Here is an example to get you started, +save the following as platformatic.composer.json:

  {
"server": {
"hostname": "127.0.0.1",
"port": 0,
"logger": {
"level": "info"
}
},
"composer": {
"services": [
{
"id": "service1",
"origin": "http://127.0.0.1:3051",
"openapi": {
"url": "/documentation/json"
}
},
{
"id": "service2",
"origin": "http://127.0.0.1:3052",
"openapi": {
"file": "./schemas/service2.openapi.json"
}
}
],
"refreshTimeout": 1000
}
}

By sending the SIGUSR2 signal, the server can be reloaded.

Options:

  • -c, --config FILE - Specify a configuration file to use.

If not specified, the configuration will be loaded from any of the following, in the current directory.

  • platformatic.composer.json, or
  • platformatic.composer.yml, or
  • platformatic.composer.tml

You can find more details about the configuration format here:

db

platformatic db <command>

compile

Compile typescript plugins.

  $ platformatic db compile

As a result of executing this command, the Platformatic DB will compile typescript +plugins in the outDir directory.

If not specified, the configuration will be loaded from any of the following, in the current directory.

  • platformatic.db.json, or
  • platformatic.db.yml, or
  • platformatic.db.tml

You can find more details about the configuration format here:

help

Available commands:

  • help - show this help message.
  • help <command> - shows more information about a command.
  • start - start the server.
  • compile - compile typescript plugins.
  • seed - run a seed file.
  • types - generate typescript types for entities.
  • schema - generate and print api schema.
  • migrations create - generate do and undo migration files.
  • migrations apply - apply migration files.

migrations apply

Apply all configured migrations to the database:

  $ platformatic db migrations apply

The migrations will be applied in the order they are specified in the +folder defined in the configuration file. If you want to apply a specific migration, +you can use the --to option:

  $ platformatic db migrations apply --to 001

Here is an example migration:

  CREATE TABLE graphs (
id SERIAL PRIMARY KEY,
name TEXT
);

You can always rollback to a specific migration with:

  $ platformatic db migrations apply --to VERSION

Use 000 to reset to the initial state.

Options:

  • -c, --config <path> - Path to the configuration file.
  • -t, --to <version> - Migrate to a specific version.

If not specified, the configuration will be loaded from any of the following, in the current directory.

  • platformatic.db.json, or
  • platformatic.db.yml, or
  • platformatic.db.tml

You can find more details about the configuration format here:

migrations create

Create next migration files.

  $ platformatic db migrations create

It will generate do and undo sql files in the migrations folder. The name of the +files will be the next migration number.

  $ platformatic db migrations create --name "create_users_table"

Options:

  • -c, --config <path> - Path to the configuration file.

If not specified, the configuration will be loaded from any of the following, in the current directory.

  • platformatic.db.json, or
  • platformatic.db.yml, or
  • platformatic.db.tml

You can find more details about the configuration format here:

migrations

Available commands:

  • migrations create - generate do and undo migration files.
  • migrations apply - apply migration files.

schema

Update the config schema file:

  • schema config - update the JSON schema config available on platformatic.db.schema.json

Your configuration on platformatic.db.json has a schema defined to improve the developer experience and avoid mistakes when updating the configuration of Platformatic DB. +When you run platformatic db init, a new JSON $schema property is added in platformatic.db.schema.json. This can allow your IDE to add suggestions (f.e. mandatory/missing fields, types, default values) by opening the config in platformatic.db.json. +Running platformatic db schema config you can update your schema so that it matches well the latest changes available on your config.

Generate a schema from the database and prints it to standard output:

  • schema graphql - generate the GraphQL schema
  • schema openapi - generate the OpenAPI schema

Options:

  • -c, --config FILE - Specify a configuration file to use.

If not specified, the configuration will be loaded from any of the following, in the current directory.

  • platformatic.db.json, or
  • platformatic.db.yml, or
  • platformatic.db.tml

You can find more details about the configuration format here:

seed

Load a seed into the database. This is a convenience method that loads +a JavaScript file and configure @platformatic/sql-mapper to connect to +the database specified in the configuration file.

Here is an example of a seed file:

  'use strict'

module.exports = async function ({ entities, db, sql }) {
await entities.graph.save({ input: { name: 'Hello' } })
await db.query(sql`
INSERT INTO graphs (name) VALUES ('Hello 2');
`)
}

You can run this using the seed command:

  $ platformatic db seed seed.js

Options:

  • --config - Path to the configuration file.

If not specified, the configuration will be loaded from any of the following, in the current directory.

  • platformatic.db.json, or
  • platformatic.db.yml, or
  • platformatic.db.tml

You can find more details about the configuration format here:

start

Start the Platformatic DB server with the following command:

 $ platformatic db start

You will need a configuration file. Here is an example to get you started, +save the following as platformatic.db.json:

  {
"server": {
"hostname": "127.0.0.1",
"port": 0,
"logger": {
"level": "info"
}
},
"db": {
"connectionString": "sqlite://./db"
},
"migrations": {
"dir": "./migrations"
}
}

Remember to create a migration, run the db help migrate command to know more.

All outstanding migrations will be applied to the database unless the +migrations.autoApply configuration option is set to false.

By sending the SIGUSR2 signal, the server can be reloaded.

Options:

  • -c, --config FILE - Specify a configuration file to use.

If not specified, the configuration will be loaded from any of the following, in the current directory.

  • platformatic.db.json, or
  • platformatic.db.yml, or
  • platformatic.db.tml

You can find more details about the configuration format here:

types

Generate typescript types for your entities from the database.

  $ platformatic db types

As a result of executing this command, the Platformatic DB will generate a types +folder with a typescript file for each database entity. It will also generate a +global.d.ts file that injects the types into the Application instance.

In order to add type support to your plugins, you need to install some additional +dependencies. To do this, copy and run an npm install command with dependencies +that "platformatic db types" will ask you.

Here is an example of a platformatic plugin.js with jsdoc support. +You can use it to add autocomplete to your code.

/// <reference path="./global.d.ts" />
'use strict'

/** @param {import('fastify').FastifyInstance} app */
module.exports = async function (app) {
app.get('/movie', async () => {
const movies = await app.platformatic.entities.movie.find({
where: { title: { eq: 'The Hitchhiker\'s Guide to the Galaxy' } }
})
return movies[0].id
})
}

If not specified, the configuration will be loaded from any of the following, in the current directory.

  • platformatic.db.json, or
  • platformatic.db.yml, or
  • platformatic.db.tml

You can find more details about the configuration format here:

service

platformatic service <command>

compile

Compile typescript plugins.

  $ platformatic service compile

As a result of executing this command, Platformatic Service will compile typescript plugins in the outDir directory.

If not specified, the configuration will be loaded from any of the following, in the current directory.

  • platformatic.service.json, or
  • platformatic.service.yml, or
  • platformatic.service.tml

You can find more details about the configuration format here:

help

Available commands:

  • help - show this help message.
  • help <command> - shows more information about a command.
  • start - start the server.
  • schema config - generate the schema configuration file.

schema

Update the config schema file:

  • schema config - update the JSON schema config available on platformatic.service.schema.json

Your configuration on platformatic.service.json has a schema defined to improve the developer experience and avoid mistakes when updating the configuration of Platformatic Service. When you initialize a new Platformatic service (e.g. by running npm create platformatic@latest), a new JSON $schema property is added to the platformatic.service.json config. This allows your IDE to add suggestions (e.g. mandatory/missing fields, types, default values) when you open the platformatic.service.json config. By running platformatic service schema config you can update your schema so that it matches the latest changes available in your config.

start

Start the Platformatic Service with the following command:

 $ platformatic service start

You will need a configuration file. Here is an example to get you started; save the following as platformatic.service.json:

{
"server": {
"hostname": "127.0.0.1",
"port": 0,
"logger": {
"level": "info"
}
},
"plugin": {
"path": "./plugin.js"
}
}

frontend

platformatic frontend <url> <language>

Create frontend code to consume the REST APIs of a Platformatic application.

From the directory you want the frontend code to be generated (typically <YOUR_FRONTEND_APP_DIRECTORY>/src/) run -

npx platformatic frontend http://127.0.0.1:3042 ts

ℹ️

Where http://127.0.0.1:3042 must be replaced with your Platformatic application endpoint, and the language can either be ts or js. When the command is run, the Platformatic CLI generates -

  • api.d.ts - A TypeScript module that includes all the OpenAPI-related types.
  • api.ts or api.js - A module that includes a function for every single REST endpoint.

If you use the --name option it will create custom file names.

npx platformatic frontend http://127.0.0.1:3042 ts --name foobar

Will create foobar.ts and foobar-types.d.ts

Refer to the dedicated guide where the full process of generating and consuming the frontend code is described.

In case of problems, please check that:

  • The Platformatic app URL is valid.
  • The Platformatic app that the URL belongs to must be up and running.
  • OpenAPI must be enabled (db.openapi in your platformatic.db.json is not set to false). You can find more details about the db configuration format here.
  • CORS must be managed in your Platformatic app (server.cors.origin.regexp in your platformatic.db.json is set to /*/, for instance). You can find more details about the cors configuration here.

runtime

platformatic runtime <command>

compile

Compile all typescript plugins for all services.

  $ platformatic runtime compile

This command will compile the TypeScript plugins for each service registered in the runtime.

help

Available commands:

  • help - show this help message.
  • help <command> - shows more information about a command.
  • start - start the application.

start

Start the Platformatic Runtime with the following command:

 $ platformatic runtime start

start

Start a Platformatic application with the following command:

$ platformatic start

Options:

  • -c, --config <path> - Path to the configuration file.
  • --inspect[=[host:]port] - Start the Node.js debugger. host defaults to '127.0.0.1'. port defaults to 9229. Use caution when binding to a public host:port combination.
  • --inspect-brk[=[host:]port] - Start the Node.js debugger and block until a client has attached. host defaults to '127.0.0.1'. port defaults to 9229. Use caution when binding to a public host:port combination.
+ + + + \ No newline at end of file diff --git a/docs/0.41.3/reference/client/frontend/index.html b/docs/0.41.3/reference/client/frontend/index.html new file mode 100644 index 00000000000..ff41c3e9e54 --- /dev/null +++ b/docs/0.41.3/reference/client/frontend/index.html @@ -0,0 +1,17 @@ + + + + + +Frontend client | Platformatic Open Source Software + + + + + +
+
Version: 0.41.3

Frontend client

Create implementation and type files that exposes a client for a remote OpenAPI server, that uses fetch and can run in any browser.

To create a client for a remote OpenAPI API, you can use the following command:

$ platformatic frontend http://example.com/to/schema/file <language> --name <clientname>

where <language> can be either js or ts.

This will create two files clientname.js (or clientname.ts) and clientname-types.d.ts for types.

clientname by default is api

Usage

The implementation generated by the tool exports all the named operation found and a factory object.

Named operations

import { setBaseUrl, getMovies } from './api.js'

setBaseUrl('http://my-server-url.com') // modifies the global `baseUrl` variable

const movies = await getMovies({})
console.log(movies)

Factory

The factory object is called build and can be used like this

import build from './api.js'

const client = build('http://my-server-url.com')

const movies = await client.getMovies({})
console.log(movies)

You can use both named operations and the factory in the same file. They can work on different hosts, so the factory does not use the global setBaseUrl function.

Generated Code

The type file will look like this

export interface GetMoviesRequest {
'limit'?: number;
'offset'?: number;
// ... all other options
}

interface GetMoviesResponseOK {
'id': number;
'title': string;
}
export interface Api {
setBaseUrl(newUrl: string) : void;
getMovies(req: GetMoviesRequest): Promise<Array<GetMoviesResponseOK>>;
// ... all operations listed here
}

type PlatformaticFrontendClient = Omit<Api, 'setBaseUrl'>
export default function build(url: string): PlatformaticFrontendClient

The javascript implementation will look like this

let baseUrl = ''
/** @type {import('./api-types.d.ts').Api['setBaseUrl']} */
export const setBaseUrl = (newUrl) => { baseUrl = newUrl }

/** @type {import('./api-types.d.ts').Api['getMovies']} */
export const getMovies = async (request) => {
return await _getMovies(baseUrl, request)
}
async function _createMovie (url, request) {
const response = await fetch(`${url}/movies/`, {
method:'post',
body: JSON.stringify(request),
headers: {
'Content-Type': 'application/json'
}
})

if (!response.ok) {
throw new Error(await response.text())
}

return await response.json()
}

/** @type {import('./api-types.d.ts').Api['createMovie']} */
export const createMovie = async (request) => {
return await _createMovie(baseUrl, request)
}
// ...

export default function build (url) {
return {
getMovies: _getMovies.bind(url, ...arguments),
// ...
}
}

The typescript implementation will look like this

import type { Api } from './api-types'
import * as Types from './api-types'

let baseUrl = ''
export const setBaseUrl = (newUrl: string) : void => { baseUrl = newUrl }

const _getMovies = async (url: string, request: Types.GetMoviesRequest) => {
const response = await fetch(`${url}/movies/?${new URLSearchParams(Object.entries(request || {})).toString()}`)

if (!response.ok) {
throw new Error(await response.text())
}

return await response.json()
}

export const getMovies: Api['getMovies'] = async (request: Types.GetMoviesRequest) => {
return await _getMovies(baseUrl, request)
}
// ...
export default function build (url) {
return {
getMovies: _getMovies.bind(url, ...arguments),
// ...
}
}
+ + + + \ No newline at end of file diff --git a/docs/0.41.3/reference/client/introduction/index.html b/docs/0.41.3/reference/client/introduction/index.html new file mode 100644 index 00000000000..7abbaacef1b --- /dev/null +++ b/docs/0.41.3/reference/client/introduction/index.html @@ -0,0 +1,34 @@ + + + + + +Platformatic Client | Platformatic Open Source Software + + + + + +
+
Version: 0.41.3

Platformatic Client

Create a Fastify plugin that exposes a client for a remote OpenAPI or GraphQL API.

To create a client for a remote OpenAPI API, you can use the following command:

$ platformatic client http://example.com/to/schema/file --name myclient

To create a client for a remote Graphql API, you can use the following command:

$ platformatic client http://example.com/graphql --name myclient

Usage with Platformatic Service or Platformatic DB

If you run the generator in a Platformatic application, it will automatically extend it to load your client by editing the configuration file and adding a clients section. Then, in any part of your Platformatic application you can use the client.

You can use the client in your application in Javascript, calling a GraphQL endpoint:

// Use a typescript reference to set up autocompletion
// and explore the generated APIs.

/// <reference path="./myclient" />

/** @type {import('fastify').FastifyPluginAsync<{} */
module.exports = async function (app, opts) {
app.post('/', async (request, reply) => {
const res = await app.myclient.graphql({
query: 'query { movies { title } }'
})
return res
})
}

or in Typescript, calling an OpenAPI endpoint:

import { FastifyInstance } from 'fastify'
/// <reference path="./myclient" />

export default async function (app: FastifyInstance) {
app.get('/', async () => {
return app.myclient.get({})
})
}

The client configuration in the platformatic.db.json and platformatic.service.json would look like:

{
"clients": [{
"schema": "./myclient/myclient.openapi.json" // or ./myclient/myclient.schema.graphl
"name": "myclient",
"type": "openapi" // or graphql
"url": "{ PLT_MYCLIENT_URL }"
}]
}

Note that the generator would also have updated the .env and .env.sample files if they exist.

Generating a client for a service running within Platformatic Runtime

Platformatic Runtime allows you to create a network of services that are not exposed. +To create a client to invoke one of those services from another, run:

$ platformatic client --name <clientname> --runtime <serviceId>

Where <clientname> is the name of the client and <serviceId> is the id of the given service (which in the basic case corresponds to the folder name of that service). The generated client is identical to the one in the previous section.

Note that this command looks for a platformatic.runtime.json in a parent directory.

Example

As an example, consider a network of three microservices:

  • somber-chariot, an instance of Platformatic DB;
  • languid-noblemen, an instance of Platformatic Service;
  • pricey-paesant, an instance of Platformatic Composer, which is also the runtime entrypoint.

From within the languid-noblemen folder, we can run:

$ platformatic client --name chariot --runtime somber-chariot

The client configuration in the platformatic.db.json and platformatic.service.json would look like:

{
"clients": [{
"path": "./chariot",
"serviceId": "somber-chariot"
}]
}

Even if the client is generated from an HTTP endpoint, it is possible to add a serviceId property to each client object shown above. This is not required, but when using the Platformatic Runtime, the serviceId property will be used to identify the service dependency.

Types Generator

The types for the client are automatically generated for both OpenAPI and GraphQL schemas.

You can generate only the types with the --types-only flag.

For example

$ platformatic client http://example.com/to/schema/file --name myclient --types-only

Will create the single myclient.d.ts file in the current directory

OpenAPI

We provide a fully typed experience for OpenAPI, Typing both the request and response for +each individual OpenAPI operation.

Consider this example:

// Omitting all the individual Request and Reponse payloads for brevity

interface Client {
getMovies(req: GetMoviesRequest): Promise<Array<GetMoviesResponse>>;
createMovie(req: CreateMovieRequest): Promise<CreateMovieResponse>;
updateMovies(req: UpdateMoviesRequest): Promise<Array<UpdateMoviesResponse>>;
getMovieById(req: GetMovieByIdRequest): Promise<GetMovieByIdResponse>;
updateMovie(req: UpdateMovieRequest): Promise<UpdateMovieResponse>;
updateMovie(req: UpdateMovieRequest): Promise<UpdateMovieResponse>;
deleteMovies(req: DeleteMoviesRequest): Promise<DeleteMoviesResponse>;
getQuotesForMovie(req: GetQuotesForMovieRequest): Promise<Array<GetQuotesForMovieResponse>>;
getQuotes(req: GetQuotesRequest): Promise<Array<GetQuotesResponse>>;
createQuote(req: CreateQuoteRequest): Promise<CreateQuoteResponse>;
updateQuotes(req: UpdateQuotesRequest): Promise<Array<UpdateQuotesResponse>>;
getQuoteById(req: GetQuoteByIdRequest): Promise<GetQuoteByIdResponse>;
updateQuote(req: UpdateQuoteRequest): Promise<UpdateQuoteResponse>;
updateQuote(req: UpdateQuoteRequest): Promise<UpdateQuoteResponse>;
deleteQuotes(req: DeleteQuotesRequest): Promise<DeleteQuotesResponse>;
getMovieForQuote(req: GetMovieForQuoteRequest): Promise<GetMovieForQuoteResponse>;
}

type ClientPlugin = FastifyPluginAsync<NonNullable<client.ClientOptions>>

declare module 'fastify' {
interface FastifyInstance {
'client': Client;
}

interface FastifyRequest {
'client': Client;
}
}

declare namespace Client {
export interface ClientOptions {
url: string
}
export const client: ClientPlugin;
export { client as default };
}

declare function client(...params: Parameters<ClientPlugin>): ReturnType<ClientPlugin>;
export = client;

GraphQL

We provide a partially typed experience for GraphQL, because we do not want to limit +how you are going to query the remote system. Take a look at this example:

declare module 'fastify' {
interface GraphQLQueryOptions {
query: string;
headers: Record<string, string>;
variables: Record<string, unknown>;
}
interface GraphQLClient {
graphql<T>(GraphQLQuery): PromiseLike<T>;
}
interface FastifyInstance {
'client'
: GraphQLClient;

}

interface FastifyRequest {
'client'<T>(GraphQLQuery): PromiseLike<T>;
}
}

declare namespace client {
export interface Clientoptions {
url: string
}
export interface Movie {
'id'?: string;

'title'?: string;

'realeasedDate'?: string;

'createdAt'?: string;

'preferred'?: string;

'quotes'?: Array<Quote>;

}
export interface Quote {
'id'?: string;

'quote'?: string;

'likes'?: number;

'dislikes'?: number;

'movie'?: Movie;

}
export interface MoviesCount {
'total'?: number;

}
export interface QuotesCount {
'total'?: number;

}
export interface MovieDeleted {
'id'?: string;

}
export interface QuoteDeleted {
'id'?: string;

}
export const client: Clientplugin;
export { client as default };
}

declare function client(...params: Parameters<Clientplugin>): ReturnType<Clientplugin>;
export = client;

Since only you know what GraphQL query you are producing, you are responsible for typing it accordingly.

Usage with standalone Fastify

If a platformatic configuration file is not found, a complete Fastify plugin is generated to be +used in your Fastify application like so:

const fastify = require('fastify')()
const client = require('./your-client-name')

fastify.register(client, {
url: 'http://example.com'
})

// GraphQL
fastify.post('/', async (request, reply) => {
const res = await request.movies.graphql({
query: 'mutation { saveMovie(input: { title: "foo" }) { id, title } }'
})
return res
})

// OpenAPI
fastify.post('/', async (request, reply) => {
const res = await request.movies.createMovie({ title: 'foo' })
return res
})

fastify.listen({ port: 3000 })

Note that you would need to install @platformatic/client as a dependency.

How are the method names defined in OpenAPI

The names of the operations are defined in the OpenAPI specification. Specifically, we use the operationId. If that's not part of the spec, the name is generated by combining the parts of the path and the method. For example, for /something/{param1}/ with method GET, it generates getSomethingParam1.

Authentication

It's very common that downstream services require some form of authentication. How can we add the necessary headers? You can configure them from your plugin:

/// <reference path="./myclient" />

/** @type {import('fastify').FastifyPluginAsync<{} */
module.exports = async function (app, opts) {
app.configureMyclient({
async getHeaders (req, reply) {
return {
'foo': 'bar'
}
}
})

app.post('/', async (request, reply) => {
const res = await app.myclient.graphql({
query: 'query { movies { title } }'
})
return res
})
}

Telemetry propagation

To correctly propagate telemetry information, be sure to get the client from the request object, e.g.:

fastify.post('/', async (request, reply) => {
const res = await request.movies.createMovie({ title: 'foo' })
return res
})
+ + + + \ No newline at end of file diff --git a/docs/0.41.3/reference/client/programmatic/index.html b/docs/0.41.3/reference/client/programmatic/index.html new file mode 100644 index 00000000000..c40395ce47d --- /dev/null +++ b/docs/0.41.3/reference/client/programmatic/index.html @@ -0,0 +1,17 @@ + + + + + +Programmatic API | Platformatic Open Source Software + + + + + +
+
Version: 0.41.3

Programmatic API

It is possible to use the Platformatic client without the generator.

OpenAPI Client

import { buildOpenAPIClient } from '@platformatic/client'

const client = await buildOpenAPIClient({
url: `https://yourapi.com/documentation/json`,
// path: 'path/to/openapi.json',
headers: {
'foo': 'bar'
}
})

const res = await client.yourOperationName({ foo: 'bar' })

console.log(res)

If you use Typescript you can take advantage of the generated types file

import { buildOpenAPIClient } from '@platformatic/client'
import Client from './client'
//
// interface Client {
// getMovies(req: GetMoviesRequest): Promise<Array<GetMoviesResponse>>;
// createMovie(req: CreateMovieRequest): Promise<CreateMovieResponse>;
// ...
// }
//

const client: Client = await buildOpenAPIClient<Client>({
url: `https://yourapi.com/documentation/json`,
// path: 'path/to/openapi.json',
headers: {
'foo': 'bar'
}
})

const res = await client.getMovies()
console.log(res)

GraphQL Client

import { buildGraphQLClient } from '@platformatic/client'

const client = await buildGraphQLClient({
url: `https://yourapi.com/graphql`,
headers: {
'foo': 'bar'
}
})

const res = await client.graphql({
query: `
mutation createMovie($title: String!) {
saveMovie(input: {title: $title}) {
id
title
}
}
`,
variables: {
title: 'The Matrix'
}
})

console.log(res)
+ + + + \ No newline at end of file diff --git a/docs/0.41.3/reference/composer/api-modification/index.html b/docs/0.41.3/reference/composer/api-modification/index.html new file mode 100644 index 00000000000..273ffb55eaf --- /dev/null +++ b/docs/0.41.3/reference/composer/api-modification/index.html @@ -0,0 +1,19 @@ + + + + + +API modification | Platformatic Open Source Software + + + + + +
+
Version: 0.41.3

API modification

If you want to modify automatically generated API, you can use composer custom onRoute hook.

addComposerOnRouteHook(openApiPath, methods, handler)

  • openApiPath (string) - A route OpenAPI path that Platformatic Composer takes from the OpenAPI specification.
  • methods (string[]) - Route HTTP methods that Platformatic Composer takes from the OpenAPI specification.
  • handler (function) - fastify onRoute hook handler.

onComposerResponse

The onComposerResponse hook is called after the response is received from a composed service. It might be useful if you want to modify the response before it is sent to the client. If you want to use it, you need to add the onComposerResponse property to the config object of the route options.

  • request (object) - fastify request object.
  • reply (object) - fastify reply object.
  • body (object) - undici response body object.

Example

app.platformatic.addComposerOnRouteHook('/users/{id}', ['GET'], routeOptions => {
routeOptions.schema.response[200] = {
type: 'object',
properties: {
firstName: { type: 'string' },
lastName: { type: 'string' }
}
}

async function onComposerResponse (request, reply, body) {
const payload = await body.json()
const newPayload = {
firstName: payload.first_name,
lastName: payload.last_name
}
reply.send(newPayload)
}
routeOptions.config.onComposerResponse = onComposerResponse
})
+ + + + \ No newline at end of file diff --git a/docs/0.41.3/reference/composer/configuration/index.html b/docs/0.41.3/reference/composer/configuration/index.html new file mode 100644 index 00000000000..da25fe5688d --- /dev/null +++ b/docs/0.41.3/reference/composer/configuration/index.html @@ -0,0 +1,38 @@ + + + + + +Configuration | Platformatic Open Source Software + + + + + +
+
Version: 0.41.3

Configuration

Platformatic Composer is configured with a configuration file. It supports the use of environment variables as setting values with configuration placeholders.

Configuration file

If the Platformatic CLI finds a file in the current working directory matching +one of these filenames, it will automatically load it:

  • platformatic.composer.json
  • platformatic.composer.json5
  • platformatic.composer.yml or platformatic.composer.yaml
  • platformatic.composer.tml or platformatic.composer.toml

Alternatively, a --config option with a configuration +filepath can be passed to most platformatic composer CLI commands.

The configuration examples in this reference use JSON.

Supported formats

FormatExtensions
JSON.json
JSON5.json5
YAML.yml, .yaml
TOML.tml, .toml

Comments are supported by the JSON5, YAML and TOML file formats.

Settings

Configuration settings are organised into the following groups:

Sensitive configuration settings containing sensitive data should be set using configuration placeholders.

server

A required object with the following settings:

  • hostname (required, string) — Hostname where Platformatic Composer server will listen for connections.

  • port (required, number) — Port where Platformatic Composer server will listen for connections.

  • healthCheck (boolean or object) — Enables the health check endpoint.

    • Powered by @fastify/under-pressure.
    • The value can be an object, used to specify the interval between checks in milliseconds (default: 5000)

    Example

    {
    "server": {
    ...
    "healthCheck": {
    "interval": 2000
    }
    }
    }
  • cors (object) — Configuration for Cross-Origin Resource Sharing (CORS) headers.

    • All options will be passed to the @fastify/cors plugin. In order to specify a RegExp object, you can pass { regexp: 'yourregexp' }, +it will be automatically converted.
  • logger (object) -- the logger configuration.

  • pluginTimeout (integer) -- the number of milliseconds to wait for a Fastify plugin to load, see the fastify docs for more details.

  • https (object) - Configuration for HTTPS supporting the following options.

    • key (required, string, object, or array) - If key is a string, it specifies the private key to be used. If key is an object, it must have a path property specifying the private key file. Multiple keys are supported by passing an array of keys.
    • cert (required, string, object, or array) - If cert is a string, it specifies the certificate to be used. If cert is an object, it must have a path property specifying the certificate file. Multiple certificates are supported by passing an array of keys.

metrics

Configuration for a Prometheus server that will export monitoring metrics +for the current server instance. It uses fastify-metrics +under the hood.

This setting can be a boolean or an object. If set to true the Prometheus server will listen on http://0.0.0.0:9090.

Supported object properties:

  • hostname (string) — The hostname where Prometheus server will listen for connections.
  • port (number) — The port where Prometheus server will listen for connections.
  • auth (object) — Basic Auth configuration. username and password are required here +(use environment variables).

plugins

An optional object that defines the plugins loaded by Platformatic Composer.

  • paths (required, array): an array of paths (string) +or an array of objects composed as follows,

    • path (string): Relative path to plugin's entry point.
    • options (object): Optional plugin options.
    • encapsulate (boolean): if the path is a folder, it instructs Platformatic to not encapsulate those plugins.
    • maxDepth (integer): if the path is a folder, it limits the depth to load the content from.
  • typescript (boolean): enable typescript compilation. A tsconfig.json file is required in the same folder.

    Example

    {
    "plugins": {
    "paths": [{
    "path": "./my-plugin.js",
    "options": {
    "foo": "bar"
    }
    }]
    }
    }

watch

Disable watching for file changes if set to false. It can also be customized with the following options:

  • ignore (string[], default: null): List of glob patterns to ignore when watching for changes. If null or not specified, ignore rule is not applied. Ignore option doesn't work for typescript files.

  • allow (string[], default: ['*.js', '**/*.js']): List of glob patterns to allow when watching for changes. If null or not specified, allow rule is not applied. Allow option doesn't work for typescript files.

    Example

    {
    "watch": {
    "ignore": ["*.mjs", "**/*.mjs"],
    "allow": ["my-plugin.js", "plugins/*.js"]
    }
    }

composer

Configure @platformatic/composer specific settings such as services or refreshTimeout:

  • services (array, default: []) — is an array of objects that defines +the services managed by the composer. Each service object supports the following settings:

    • id (required, string) - A unique identifier for the service.
    • origin (string) - A service origin. Skip this option if the service is executing inside of Platformatic Runtime. In this case, service id will be used instead of origin.
    • openapi (required, object) - The configuration file used to compose OpenAPI specification. See the openapi for details.
    • proxy (object or false) - Service proxy configuration. If false, the service proxy is disabled.
      • prefix (required, string) - Service proxy prefix. All service routes will be prefixed with this value.
    • refreshTimeout (number) - The number of milliseconds to wait between checks for changes in the service OpenAPI specification. If not specified, the default value is 1000.

openapi

  • url (string) - A path of the route that exposes the OpenAPI specification. If a service is a Platformatic Service or Platformatic DB, use /documentation/json as a value. Use this or file option to specify the OpenAPI specification.
  • file (string) - A path to the OpenAPI specification file. Use this or url option to specify the OpenAPI specification.
  • prefix (string) - A prefix for the OpenAPI specification. All service routes will be prefixed with this value.
  • config (string) - A path to the OpenAPI configuration file. This file is used to customize the OpenAPI specification. See the openapi-configuration for details.
openapi-configuration

The OpenAPI configuration file is a JSON file that is used to customize the OpenAPI specification. It supports the following options:

  • ignore (boolean) - If true, the route will be ignored by the composer. +If you want to ignore a specific method, use the ignore option in the nested method object.

    Example

    {
    "paths": {
    "/users": {
    "ignore": true
    },
    "/users/{id}": {
    "get": { "ignore": true },
    "put": { "ignore": true }
    }
    }
    }
  • alias (string) - Use it to create an alias for the route path. The original route path will be ignored.

    Example

    {
    "paths": {
    "/users": {
    "alias": "/customers"
    }
    }
    }
  • rename (string) - Use it to rename composed route response fields. +Use json schema format to describe the response structure. For now it works only for 200 response.

    Example

    {
    "paths": {
    "/users": {
    "responses": {
    "200": {
    "type": "array",
    "items": {
    "type": "object",
    "properties": {
    "id": { "rename": "user_id" },
    "name": { "rename": "first_name" }
    }
    }
    }
    }
    }
    }
    }

Examples

Composition of two remote services:

{
"composer": {
"services": [
{
"id": "auth-service",
"origin": "https://auth-service.com",
"openapi": {
"url": "/documentation/json",
"prefix": "auth"
}
},
{
"id": "payment-service",
"origin": "https://payment-service.com",
"openapi": {
"file": "./schemas/payment-service.json"
}
}
],
"refreshTimeout": 1000
}
}

Composition of two local services inside of Platformatic Runtime:

{
"composer": {
"services": [
{
"id": "auth-service",
"openapi": {
"url": "/documentation/json",
"prefix": "auth"
}
},
{
"id": "payment-service",
"openapi": {
"file": "./schemas/payment-service.json"
}
}
],
"refreshTimeout": 1000
}
}

telemetry

Open Telemetry is optionally supported with these settings:

  • serviceName (required, string) — Name of the service as will be reported in open telemetry.
  • version (string) — Optional version (free form)
  • skip (array) — Optional list of operations to skip when exporting telemetry. Each operation is defined as an object with the following properties:
    • method: GET, POST, PUT, DELETE, PATCH, HEAD, OPTIONS, TRACE
    • path. e.g.: /documentation/json
  • exporter (object or array) — Exporter configuration. If not defined, the exporter defaults to console. If an array of objects is configured, every object must be a valid exporter object. The exporter object has the following properties:
    • type (string) — Exporter type. Supported values are console, otlp, zipkin and memory (default: console). memory is only supported for testing purposes.
    • options (object) — These options are supported:
      • url (string) — The URL to send the telemetry to. Required for otlp exporter. This has no effect on console and memory exporters.
      • headers (object) — Optional headers to send with the telemetry. This has no effect on console and memory exporters.

Note that OTLP traces can be consumed by different solutions, like Jaeger. Here the full list.

Example

{
"telemetry": {
"serviceName": "test-service",
"exporter": {
"type": "otlp",
"options": {
"url": "http://localhost:4318/v1/traces"
}
}
}
}

Environment variable placeholders

The value for any configuration setting can be replaced with an environment variable +by adding a placeholder in the configuration file, for example {PLT_SERVER_LOGGER_LEVEL}.

All placeholders in a configuration must be available as an environment variable +and must meet the allowed placeholder name rules.

Example

platformatic.service.json
{
"server": {
"port": "{PORT}"
}
}

Platformatic will replace the placeholders in this example with the environment +variables of the same name.

Setting environment variables

If a .env file exists it will automatically be loaded by Platformatic using +dotenv. For example:

.env
PLT_SERVER_LOGGER_LEVEL=info
PORT=8080

The .env file must be located in the same folder as the Platformatic configuration +file or in the current working directory.

Environment variables can also be set directly on the command line, for example:

PLT_SERVER_LOGGER_LEVEL=debug npx platformatic composer

Allowed placeholder names

Only placeholder names prefixed with PLT_, or that are in this allow list, will be +dynamically replaced in the configuration file:

  • PORT

This restriction is to avoid accidentally exposing system environment variables. +An error will be raised by Platformatic if it finds a configuration placeholder +that isn't allowed.

The default allow list can be extended by passing a --allow-env CLI option with a +comma separated list of strings, for example:

npx platformatic composer --allow-env=HOST,SERVER_LOGGER_LEVEL

If --allow-env is passed as an option to the CLI, it will be merged with the +default allow list.

+ + + + \ No newline at end of file diff --git a/docs/0.41.3/reference/composer/introduction/index.html b/docs/0.41.3/reference/composer/introduction/index.html new file mode 100644 index 00000000000..b128d2da4ba --- /dev/null +++ b/docs/0.41.3/reference/composer/introduction/index.html @@ -0,0 +1,22 @@ + + + + + +Platformatic Composer | Platformatic Open Source Software + + + + + +
+
Version: 0.41.3

Platformatic Composer

Platformatic Composer is an HTTP server that automatically aggregates multiple +services APIs into a single API.

info

Platformatic Composer is currently in public beta.

Features

Public beta

Platformatic Composer is in public beta. You can use it in production, but it's quite +likely that you'll encounter significant bugs.

If you run into a bug or have a suggestion for improvement, please +raise an issue on GitHub.

Standalone usage

If you're only interested in the features available in Platformatic Composer, you can replace platformatic with @platformatic/composer in the dependencies of your package.json, so that you'll import fewer deps.

Example configuration file

The following configuration file can be used to start a new Platformatic +Composer project. For more details on the configuration file, see the +configuration documentation.

{
"$schema": "https://platformatic.dev/schemas/v0.26.0/composer",
"server": {
"hostname": "127.0.0.1",
"port": 0,
"logger": {
"level": "info"
}
},
"composer": {
"services": [
{
"id": "auth-service",
"origin": "https://auth-service.com",
"openapi": {
"url": "/documentation/json",
"prefix": "auth"
}
},
{
"id": "payment-service",
"origin": "https://payment-service.com",
"openapi": {
"url": "/documentation/json"
}
}
],
"refreshTimeout": 1000
},
"watch": true
}
+ + + + \ No newline at end of file diff --git a/docs/0.41.3/reference/composer/plugin/index.html b/docs/0.41.3/reference/composer/plugin/index.html new file mode 100644 index 00000000000..fd7281b0b49 --- /dev/null +++ b/docs/0.41.3/reference/composer/plugin/index.html @@ -0,0 +1,18 @@ + + + + + +Plugin | Platformatic Open Source Software + + + + + +
+
Version: 0.41.3

Plugin

If you want to add features to a service, you will need to register a plugin, which will be in the form of a standard Fastify plugin.

The config file will specify where the plugin file is located as the example below:

{
...
"plugins": {
"paths": ["./plugin/index.js"]
}
}

The path is relative to the config file path.

You should export an async function which receives the following parameters:

  • app (FastifyInstance) that is the main fastify instance
  • opts all the options specified in the config file after path

Hot Reload

Plugin file is being watched by fs.watch function.

You don't need to reload Platformatic Composer server while working on your plugin. Every time you save, the watcher will trigger a reload event and the server will auto-restart and load your updated code.

tip

At this time, on Linux, file watch in subdirectories is not supported due to a Node.js limitation (documented here).

Directories

The path can also be a directory. In that case, the directory will be loaded with @fastify/autoload.

Consider the following directory structure:

├── routes
│ ├── foo
│ │ ├── something.js
│ │ └── bar
│ │ └── baz.js
│ ├── single-plugin
│ │ └── utils.js
│ └── another-plugin.js
└── platformatic.composer.json

By default the folder will be added as a prefix to all the routes defined within them. +See the autoload documentation for all the options to customize this behavior.

Multiple plugins

Multiple plugins can be loaded in parallel by specifying an array:

{
...
"plugins": {
"paths": [{
"path": "./plugin/index.js"
}, {
"path": "./routes/"
}]
}
}
+ + + + \ No newline at end of file diff --git a/docs/0.41.3/reference/composer/programmatic/index.html b/docs/0.41.3/reference/composer/programmatic/index.html new file mode 100644 index 00000000000..b16ead24de6 --- /dev/null +++ b/docs/0.41.3/reference/composer/programmatic/index.html @@ -0,0 +1,18 @@ + + + + + +Programmatic API | Platformatic Open Source Software + + + + + +
+
Version: 0.41.3

Programmatic API

In many cases it's useful to start Platformatic Composer using an API instead of +command line, e.g. in tests we want to start and stop our server.

The buildServer function allows that:

import { buildServer } from '@platformatic/composer'

const app = await buildServer('path/to/platformatic.composer.json')
await app.start()

const res = await fetch(app.url)
console.log(await res.json())

// do something

await app.close()

It is also possible to customize the configuration:

import { buildServer } from '@platformatic/composer'

const app = await buildServer({
server: {
hostname: '127.0.0.1',
port: 0
},
services: [
{
id: 'auth-service',
origin: 'https://auth-service.com',
openapi: {
url: '/documentation/json',
prefix: 'auth'
}
},
{
id: 'payment-service',
origin: 'https://payment-service.com',
openapi: {
file: './schemas/payment-service.json'
}
}
]
})

await app.start()

const res = await fetch(app.url)
console.log(await res.json())

// do something

await app.close()
+ + + + \ No newline at end of file diff --git a/docs/0.41.3/reference/db/authorization/introduction/index.html b/docs/0.41.3/reference/db/authorization/introduction/index.html new file mode 100644 index 00000000000..440e5ae8633 --- /dev/null +++ b/docs/0.41.3/reference/db/authorization/introduction/index.html @@ -0,0 +1,21 @@ + + + + + +Authorization | Platformatic Open Source Software + + + + + +
+
Version: 0.41.3

Authorization

Introduction

Authorization in Platformatic DB is role-based. User authentication and the +assignment of roles must be handled by an external authentication service.

Configuration

Authorization strategies and rules are configured via a Platformatic DB +configuration file. See the Platformatic DB Configuration +documentation for the supported settings.

Bypass authorization in development

To make testing and developing easier, it's possible to bypass authorization checks +if an adminSecret is set. See the HTTP headers (development only) documentation.

+ + + + \ No newline at end of file diff --git a/docs/0.41.3/reference/db/authorization/rules/index.html b/docs/0.41.3/reference/db/authorization/rules/index.html new file mode 100644 index 00000000000..e7fc31438ce --- /dev/null +++ b/docs/0.41.3/reference/db/authorization/rules/index.html @@ -0,0 +1,28 @@ + + + + + +Rules | Platformatic Open Source Software + + + + + +
+
Version: 0.41.3

Rules

Introduction

Authorization rules can be defined to control what operations users are +able to execute via the REST or GraphQL APIs that are exposed by a Platformatic +DB app.

Every rule must specify:

  • role (required) — A role name. It's a string and must match with the role(s) set by an external authentication service.
  • entity (optional) — The Platformatic DB entity to apply this rule to.
  • entities (optional) — The Platformatic DB entities to apply this rule to.
  • defaults (optional) — Configure entity fields that will be +automatically set from user data.
  • One entry for each supported CRUD operation: find, save, delete

One of entity and entities must be specified.

Operation checks

Every entity operation — such as find, insert, save or delete — can have +authorization checks specified for them. This value can be false (operation disabled) +or true (operation enabled with no checks).

To specify more fine-grained authorization controls, add a checks field, e.g.:

{
"role": "user",
"entity": "page",
"find": {
"checks": {
"userId": "X-PLATFORMATIC-USER-ID"
}
},
...
}

In this example, when a user with a user role executes a findPage, they can +access all the data that has userId equal to the value in user metadata with +key X-PLATFORMATIC-USER-ID.

Note that "userId": "X-PLATFORMATIC-USER-ID" is syntactic sugar for:

      "find": {
"checks": {
"userId": {
"eq": "X-PLATFORMATIC-USER-ID"
}
}
}

It's possible to specify more complex rules using all the supported where clause operators.

Note that userId MUST exist as a field in the database table to use this feature.

GraphQL events and subscriptions

Platformatic DB supports GraphQL subscriptions and therefore db-authorization must protect them. +The check is performed based on the find permissions, the only permissions that are supported are:

  1. find: false, the subscription for that role is disabled
  2. find: { checks: { [prop]: 'X-PLATFORMATIC-PROP' } } validates that the given prop is equal
  3. find: { checks: { [prop]: { eq: 'X-PLATFORMATIC-PROP' } } } validates that the given prop is equal

Conflicting rules across roles for different equality checks will not be supported.

Restrict access to entity fields

If a fields array is present on an operation, Platformatic DB restricts the columns on which the user can execute to that list. +For save operations, the configuration must specify all the not-nullable fields (otherwise, it would fail at runtime). +Platformatic does these checks at startup.

Example:

    "rule": {
"entity": "page",
"role": "user",
"find": {
"checks": {
"userId": "X-PLATFORMATIC-USER-ID"
},
"fields": ["id", "title"]
}
...
}

In this case, only id and title are returned for a user with a user role on the page entity.

Set entity fields from user metadata

Defaults are used in database inserts; they are fields that are automatically populated from user metadata, e.g.:

        "defaults": {
"userId": "X-PLATFORMATIC-USER-ID"
},

When an entity is created, the userId column is used and populated using the value from user metadata.

Programmatic rules

If it's necessary to have more control over the authorizations, it's possible to specify the rules programmatically, e.g.:


app.register(auth, {
jwt: {
secret: 'supersecret'
},
rules: [{
role: 'user',
entity: 'page',
async find ({ user, ctx, where }) {
return {
...where,
userId: {
eq: user['X-PLATFORMATIC-USER-ID']
}
}
},
async delete ({ user, ctx, where }) {
return {
...where,
userId: {
eq: user['X-PLATFORMATIC-USER-ID']
}
}
},
defaults: {
userId: async function ({ user, ctx, input }) {
match(user, {
'X-PLATFORMATIC-USER-ID': generated.shift(),
'X-PLATFORMATIC-ROLE': 'user'
})
return user['X-PLATFORMATIC-USER-ID']
}

},
async save ({ user, ctx, where }) {
return {
...where,
userId: {
eq: user['X-PLATFORMATIC-USER-ID']
}
}
}
}]
})

In this example, the user role can delete all the posts edited before yesterday:

 app.register(auth, {
jwt: {
secret: 'supersecret'
},
roleKey: 'X-PLATFORMATIC-ROLE',
anonymousRole: 'anonymous',
rules: [{
role: 'user',
entity: 'page',
find: true,
save: true,
async delete ({ user, ctx, where }) {
return {
...where,
editedAt: {
lt: yesterday
}
}
},
defaults: {
userId: 'X-PLATFORMATIC-USER-ID'
}
}]
})

Access validation on entity mapper for plugins

To assert that a specific user with its role(s) has the correct access rights to use entities on a Platformatic plugin, the context should be passed to the entity mapper in order to verify its permissions, like this:

//plugin.js

app.post('/', async (req, reply) => {
const ctx = req.createPlatformaticCtx()

await app.platformatic.entities.movie.find({
where: { /*...*/ },
ctx
})
})

Skip authorization rules

In custom plugins, it's possible to skip the authorization rules on entities programmatically by setting the skipAuth flag to true or not passing a ctx, e.g.:

// this works even if the user's role doesn't have the `find` permission.
const result = await app.platformatic.entities.page.find({skipAuth: true, ...})

This has the same effect:

// this works even if the user's role doesn't have the `find` permission
const result = await app.platformatic.entities.page.find() // no `ctx`

This is useful for custom plugins for which the authentication is not necessary, so there is no user role set when invoked.

info

Skip authorization rules is not possible on the automatically generated REST and GraphQL APIs.

Avoid repetition of the same rule multiple times

Very often we end up writing the same rules over and over again. +Instead, it's possible to condense the rule for multiple entities on a single entry:

 app.register(auth, {
jwt: {
secret: 'supersecret'
},
roleKey: 'X-PLATFORMATIC-ROLE',
anonymousRole: 'anonymous',
rules: [{
role: 'anonymous',
entities: ['category', 'page'],
find: true,
delete: false,
save: false
}]
})
+ + + + \ No newline at end of file diff --git a/docs/0.41.3/reference/db/authorization/strategies/index.html b/docs/0.41.3/reference/db/authorization/strategies/index.html new file mode 100644 index 00000000000..ce664e40401 --- /dev/null +++ b/docs/0.41.3/reference/db/authorization/strategies/index.html @@ -0,0 +1,40 @@ + + + + + +Strategies | Platformatic Open Source Software + + + + + +
+
Version: 0.41.3

Strategies

Introduction

Platformatic DB supports the following authorization strategies:

JSON Web Token (JWT)

The JSON Web Token (JWT) authorization strategy is built on top +of the @fastify/jwt Fastify plugin.

Platformatic DB JWT integration

To configure it, the quickest way is to pass a shared secret in your +Platformatic DB configuration file, for example:

platformatic.db.json
{
"authorization": {
"jwt": {
"secret": "<shared-secret>"
}
}
}

By default @fastify/jwt looks for a JWT in an HTTP request's Authorization +header. This requires HTTP requests to the Platformatic DB API to include an +Authorization header like this:

Authorization: Bearer <token>

See the @fastify/jwt documentation +for all of the available configuration options.

JSON Web Key Sets (JWKS)

The JWT authorization strategy includes support for JSON Web Key Sets.

To configure it:

platformatic.db.json
{
"authorization": {
"jwt": {
"jwks": {
"allowedDomains": [
"https://ISSUER_DOMAIN"
]
}
}
}
}

When a JSON Web Token is included in a request to Platformatic DB, it retrieves the +correct public key from https://ISSUER_DOMAIN/.well-known/jwks.json and uses it to +verify the JWT signature. The token carries all the information, like the kid, +which is the key id used to sign the token itself, so no other configuration is required.

JWKS can be enabled without any options:

platformatic.db.json
{
"authorization": {
"jwt": {
"jwks": true
}
}
}

When configured like this, the JWK URL is calculated from the iss (issuer) field of JWT, so +every JWT token from an issuer that exposes a valid JWKS token will pass the validation. +This configuration should only be used in development, while +in every other case the allowedDomains option should be specified.

Any option supported by the get-jwks +library can be specified in the authorization.jwt.jwks object.

JWT Custom Claim Namespace

JWT claims can be namespaced to avoid name collisions. If so, we will receive tokens +with custom claims such as: https://platformatic.dev/X-PLATFORMATIC-ROLE +(where https://platformatic.dev/ is the namespace). +If we want to map these claims to user metadata removing our namespace, we can +specify the namespace in the JWT options:

platformatic.db.json
{
"authorization": {
"jwt": {
"namespace": "https://platformatic.dev/"
}
}
}

With this configuration, the https://platformatic.dev/X-PLATFORMATIC-ROLE claim +is mapped to X-PLATFORMATIC-ROLE user metadata.

Webhook

Platformatic DB can use a webhook to authenticate requests.

Platformatic DB Webhook integration

In this case, the URL is configured on authorization:

platformatic.db.json
{
"authorization": {
"webhook": {
"url": "<webhook url>"
}
}
}

When a request is received, Platformatic sends a POST to the webhook, replicating +the same body and headers, except for:

  • host
  • connection

In the Webhook case, the HTTP response contains the roles/user information as HTTP headers.

HTTP headers (development only)

danger

Passing an admin API key via HTTP headers is highly insecure and should only be used +during development or within protected networks.

If a request has X-PLATFORMATIC-ADMIN-SECRET HTTP header set with a valid adminSecret +(see configuration reference) the +role is set automatically as platformatic-admin, unless a different role is set for +user impersonation (which is disabled if JWT or Webhook are set, see below).

Platformatic DB HTTP Headers

Also, the following rule is automatically added to every entity, allowing the user +that presented the adminSecret to perform any operation on any entity:

{
"role": "platformatic-admin",
"find": true,
"delete": true,
"save": true
}
+ + + + \ No newline at end of file diff --git a/docs/0.41.3/reference/db/authorization/user-roles-metadata/index.html b/docs/0.41.3/reference/db/authorization/user-roles-metadata/index.html new file mode 100644 index 00000000000..21ed9d31afd --- /dev/null +++ b/docs/0.41.3/reference/db/authorization/user-roles-metadata/index.html @@ -0,0 +1,31 @@ + + + + + +User Roles & Metadata | Platformatic Open Source Software + + + + + +
+
Version: 0.41.3

User Roles & Metadata

Introduction

Roles and user information are passed to Platformatic DB from an external +authentication service as a string (JWT claims or HTTP headers). We refer to +this data as user metadata.

Roles

Users can have a list of roles associated with them. These roles can be specified +in an X-PLATFORMATIC-ROLE property as a list of comma separated role names +(the key name is configurable).

Note that role names are just strings.

Reserved roles

Some special role names are reserved by Platformatic DB:

  • platformatic-admin : this identifies a user who has admin powers
  • anonymous: set automatically when no roles are associated

Anonymous role

If a user has no role, the anonymous role is assigned automatically. It's possible +to specify rules to apply to users with this role:

    {
"role": "anonymous",
"entity": "page",
"find": false,
"delete": false,
"save": false
}

In this case, a user that has no role or explicitly has the anonymous role +cannot perform any operations on the page entity.

Role impersonation

If a request includes a valid X-PLATFORMATIC-ADMIN-SECRET HTTP header it is +possible to impersonate a user roles. The roles to impersonate can be specified +by sending a X-PLATFORMATIC-ROLE HTTP header containing a comma separated list +of roles.

note

When JWT or Webhook are set, user role impersonation is not enabled, and the role +is always set as platformatic-admin automatically if the X-PLATFORMATIC-ADMIN-SECRET +HTTP header is specified.

Role configuration

The roles key in user metadata defaults to X-PLATFORMATIC-ROLE. It's possible to change it using the roleKey field in configuration. The same applies to the anonymous role, whose value can be changed using anonymousRole.

 "authorization": {
"roleKey": "X-MYCUSTOM-ROLE_KEY",
"anonymousRole": "anonym",
"rules": [
...
]
}

User metadata

User roles and other user data, such as userId, are referred to by Platformatic +DB as user metadata.

User metadata is parsed from an HTTP request and stored in a user object on the +Fastify request object. This object is populated on-demand, but it's possible +to populate it explicitly with await request.setupDBAuthorizationUser().

+ + + + \ No newline at end of file diff --git a/docs/0.41.3/reference/db/configuration/index.html b/docs/0.41.3/reference/db/configuration/index.html new file mode 100644 index 00000000000..86c50fed019 --- /dev/null +++ b/docs/0.41.3/reference/db/configuration/index.html @@ -0,0 +1,59 @@ + + + + + +Configuration | Platformatic Open Source Software + + + + + +
+
Version: 0.41.3

Configuration

Platformatic DB is configured with a configuration file. It supports the use +of environment variables as setting values with configuration placeholders.

Configuration file

If the Platformatic CLI finds a file in the current working directory matching +one of these filenames, it will automatically load it:

  • platformatic.db.json
  • platformatic.db.json5
  • platformatic.db.yml or platformatic.db.yaml
  • platformatic.db.tml or platformatic.db.toml

Alternatively, a --config option with a configuration +filepath can be passed to most platformatic db CLI commands.

The configuration examples in this reference use JSON.

Supported formats

FormatExtensions
JSON.json
JSON5.json5
YAML.yml, .yaml
TOML.tml, .toml

Comments are supported by the JSON5, YAML and TOML file formats.

Settings

Configuration settings are organised into the following groups:

Sensitive configuration settings, such as a database connection URL that contains +a password, should be set using configuration placeholders.

db

A required object with the following settings:

  • connectionString (required, string) — Database connection URL.

    • Example: postgres://user:password@my-database:5432/db-name
  • schema (array of string) - Currently supported only for postgres, schemas used to look for entities. If not provided, the default public schema is used.

    Examples

  "db": {
"connectionString": "(...)",
"schema": [
"schema1", "schema2"
],
...

},

  • Platformatic DB supports MySQL, MariaDB, PostgreSQL and SQLite.

  • graphql (boolean or object, default: true) — Controls the GraphQL API interface, with optional GraphiQL UI.

    Examples

    Enables GraphQL support

    {
    "db": {
    ...
    "graphql": true
    }
    }

    Enables GraphQL support with GraphiQL

    {
    "db": {
    ...
    "graphql": {
    "graphiql": true
    }
    }
    }

    It's possible to selectively ignore entities:

    {
    "db": {
    ...
    "graphql": {
    "ignore": {
    "categories": true
    }
    }
    }
    }

    It's possible to selectively ignore fields:

    {
    "db": {
    ...
    "graphql": {
    "ignore": {
    "categories": {
    "name": true
    }
    }
    }
    }
    }

    It's possible to add a custom GraphQL schema during the startup:

    {
    "db": {
    ...
    "graphql": {
    "schemaPath": "path/to/schema.graphql"
    }
    }
    }
  • openapi (boolean or object, default: true) — Enables OpenAPI REST support.

    • If value is an object, all OpenAPI v3 allowed properties can be passed. Also a prefix property can be passed to set the OpenAPI prefix.
    • Platformatic DB uses @fastify/swagger under the hood to manage this configuration.

    Examples

    Enables OpenAPI

    {
    "db": {
    ...
    "openapi": true
    }
    }

    Enables OpenAPI with prefix

    {
    "db": {
    ...
    "openapi": {
    "prefix": "/api"
    }
    }
    }

    Enables OpenAPI with options

    {
    "db": {
    ...
    "openapi": {
    "info": {
    "title": "Platformatic DB",
    "description": "Exposing a SQL database as REST"
    }
    }
    }
    }

    You can for example add the security section, so that Swagger will allow you to add the authentication header to your requests. +In the following code snippet, we're adding a Bearer token in the form of a JWT:

    {
    "db": {
    ...
    "openapi": {
    ...
    "security": [{ "bearerAuth": [] }],
    "components": {
    "securitySchemes": {
    "bearerAuth": {
    "type": "http",
    "scheme": "bearer",
    "bearerFormat": "JWT"
    }
    }
    }
    }
    }
    }

    It's possible to selectively ignore entities:

    {
    "db": {
    ...
    "openapi": {
    "ignore": {
    "categories": true
    }
    }
    }
    }

    It's possible to selectively ignore fields:

    {
    "db": {
    ...
    "openapi": {
    "ignore": {
    "categories": {
    "name": true
    }
    }
    }
    }
    }
  • ignore (object) — Key/value object that defines which database tables should not be mapped as API entities.

    Examples

    {
    "db": {
    ...
    "ignore": {
    "versions": true // "versions" table will be not mapped with GraphQL/REST APIs
    }
    }
    }
  • events (boolean or object, default: true) — Controls the support for events published by the SQL mapping layer. +If enabled, this option adds support for GraphQL Subscription over WebSocket. By default it uses an in-process message broker. +It's possible to configure it to use Redis instead.

    Examples

    {
    "db": {
    ...
    "events": {
    "connectionString": "redis://:password@redishost.com:6380/"
    }
    }
    }
  • schemalock (boolean or object, default: false) — Controls the caching of the database schema on disk. +If set to true the database schema metadata is stored inside a schema.lock file. +It's also possible to configure the location of that file by specifying a path, like so:

    Examples

    {
    "db": {
    ...
    "schemalock": {
    "path": "./dbmetadata"
    }
    }
    }

    Starting Platformatic DB or running a migration will automatically create the schemalock file.

metrics

Configuration for a Prometheus server that will export monitoring metrics +for the current server instance. It uses fastify-metrics +under the hood.

This setting can be a boolean or an object. If set to true the Prometheus server will listen on http://0.0.0.0:9090.

Supported object properties:

  • hostname (string) — The hostname where Prometheus server will listen for connections.
  • port (number) — The port where Prometheus server will listen for connections.
  • auth (object) — Basic Auth configuration. username and password are required here +(use environment variables).

migrations

Configures Postgrator to run migrations against the database.

An optional object with the following settings:

  • dir (required, string): Relative path to the migrations directory.
  • autoApply (boolean, default: false): Automatically apply migrations when Platformatic DB server starts.

plugins

An optional object that defines the plugins loaded by Platformatic DB.

  • paths (required, array): an array of paths (string) +or an array of objects composed as follows,
    • path (string): Relative path to plugin's entry point.
    • options (object): Optional plugin options.
    • encapsulate (boolean): if the path is a folder, it instructs Platformatic to not +encapsulate those plugins, +allowing decorators and hooks to be shared across all routes.
    • maxDepth (integer): if the path is a folder, it limits the depth to load the content from.
  • typescript (boolean or object): enable TypeScript compilation. A tsconfig.json file is required in the same folder.
{
"plugins": {
"paths": [{
"path": "./my-plugin.js",
"options": {
"foo": "bar"
}
}]
}
}

typescript compilation options

The typescript option can also be an object to customize the compilation. Here are the supported options:

  • enabled (boolean): enables compilation
  • tsConfig (string): path to the tsconfig.json file relative to the configuration
  • outDir (string): the output directory of tsconfig.json, in case tsconfig.json is not available +and enabled is set to false (production build)
  • flags (array of string): flags to be passed to tsc. Overrides tsConfig. +

Example:

{
"plugins": {
"paths": [{
"path": "./my-plugin.js",
"options": {
"foo": "bar"
}
}],
"typescript": {
"enabled": false,
"tsConfig": "./path/to/tsconfig.json",
"outDir": "dist"
}
}
}

watch

Disable watching for file changes if set to false. It can also be customized with the following options:

  • ignore (string[], default: null): List of glob patterns to ignore when watching for changes. If null or not specified, ignore rule is not applied. Ignore option doesn't work for typescript files.

  • allow (string[], default: ['*.js', '**/*.js']): List of glob patterns to allow when watching for changes. If null or not specified, allow rule is not applied. Allow option doesn't work for typescript files.

    Example

    {
    "watch": {
    "ignore": ["*.mjs", "**/*.mjs"],
    "allow": ["my-plugin.js", "plugins/*.js"]
    }
    }

server

A required object with the following settings:

  • hostname (required, string) — Hostname where Platformatic DB server will listen for connections.

  • port (required, number) — Port where Platformatic DB server will listen for connections.

  • healthCheck (boolean or object) — Enables the health check endpoint.

    • Powered by @fastify/under-pressure.
    • The value can be an object, used to specify the interval between checks in milliseconds (default: 5000)

    Example

    {
    "server": {
    ...
    "healthCheck": {
    "interval": 2000
    }
    }
    }
  • cors (object) — Configuration for Cross-Origin Resource Sharing (CORS) headers.

    • All options will be passed to the @fastify/cors plugin. In order to specify a RegExp object, you can pass { regexp: 'yourregexp' }, +it will be automatically converted
  • https (object) - Configuration for HTTPS supporting the following options.

    • key (required, string, object, or array) - If key is a string, it specifies the private key to be used. If key is an object, it must have a path property specifying the private key file. Multiple keys are supported by passing an array of keys.
    • cert (required, string, object, or array) - If cert is a string, it specifies the certificate to be used. If cert is an object, it must have a path property specifying the certificate file. Multiple certificates are supported by passing an array of keys.
  • logger (object) -- the logger configuration.

  • pluginTimeout (integer) -- the number of milliseconds to wait for a Fastify plugin to load

  • bodyLimit (integer) -- the maximum request body size in bytes

  • maxParamLength (integer) -- the maximum length of a request parameter

  • caseSensitive (boolean) -- if true, the router will be case sensitive

  • ignoreTrailingSlash (boolean) -- if true, the router will ignore the trailing slash

  • ignoreTrailingSlash (boolean) -- if true, the router will ignore the trailing slash

  • connectionTimeout (integer) -- the milliseconds to wait for a new HTTP request

  • keepAliveTimeout (integer) -- the milliseconds to wait for a keep-alive HTTP request

  • maxRequestsPerSocket (integer) -- the maximum number of requests per socket

  • forceCloseConnections (boolean or "idle") -- if true, the server will close all connections when it is closed

  • requestTimeout (integer) -- the milliseconds to wait for a request to be completed

  • disableRequestLogging (boolean) -- if true, the request logger will be disabled

  • exposeHeadRoutes (boolean) -- if true, the router will expose HEAD routes

  • serializerOpts (object) -- the serializer options

  • requestIdHeader (string or false) -- the name of the header that will contain the request id

  • requestIdLogLabel (string) -- Defines the label used for the request identifier when logging the request. default: 'reqId'

  • jsonShorthand (boolean) -- default: true -- visit fastify docs for more details

  • trustProxy (boolean or integer or string or String[]) -- default: false -- visit fastify docs for more details

tip

See the fastify docs for more details.

authorization

An optional object with the following settings:

  • adminSecret (string): A secret that should be sent in an +x-platformatic-admin-secret HTTP header when performing GraphQL/REST API +calls. Use an environment variable placeholder +to securely provide the value for this setting.
  • roleKey (string, default: X-PLATFORMATIC-ROLE): The name of the key in user +metadata that is used to store the user's roles. See Role configuration.
  • anonymousRole (string, default: anonymous): The name of the anonymous role. See Role configuration.
  • jwt (object): Configuration for the JWT authorization strategy. +Any option accepted by @fastify/jwt +can be passed in this object.
  • webhook (object): Configuration for the Webhook authorization strategy.
    • url (required, string): Webhook URL that Platformatic DB will make a +POST request to.
  • rules (array): Authorization rules that describe the CRUD actions that +users are allowed to perform against entities. See Rules +documentation.
note

If an authorization object is present, but no rules are specified, no CRUD +operations are allowed unless adminSecret is passed.

Example

platformatic.db.json
{
"authorization": {
"jwt": {
"secret": "{PLT_AUTHORIZATION_JWT_SECRET}"
},
"rules": [
...
]
}
}

telemetry

Open Telemetry is optionally supported with these settings:

  • serviceName (required, string) — Name of the service as will be reported in open telemetry.
  • version (string) — Optional version (free form)
  • skip (array). Optional list of operations to skip when exporting telemetry defined object with properties:
    • method: GET, POST, PUT, DELETE, PATCH, HEAD, OPTIONS, TRACE
    • path. e.g.: /documentation/json
  • exporter (object or array) — Exporter configuration. If not defined, the exporter defaults to console. If an array of objects is configured, every object must be a valid exporter object. The exporter object has the following properties:
    • type (string) — Exporter type. Supported values are console, otlp, zipkin and memory (default: console). memory is only supported for testing purposes.
    • options (object) — These options are supported:
      • url (string) — The URL to send the telemetry to. Required for otlp exporter. This has no effect on console and memory exporters.
      • headers (object) — Optional headers to send with the telemetry. This has no effect on console and memory exporters.

Note that OTLP traces can be consumed by different solutions, like Jaeger. Here the full list.

Example

{
"telemetry": {
"serviceName": "test-service",
"exporter": {
"type": "otlp",
"options": {
"url": "http://localhost:4318/v1/traces"
}
}
}
}

Environment variable placeholders

The value for any configuration setting can be replaced with an environment variable +by adding a placeholder in the configuration file, for example {PLT_SERVER_LOGGER_LEVEL}.

All placeholders in a configuration must be available as an environment variable +and must meet the allowed placeholder name rules.

Example

platformatic.db.json
{
"db": {
"connectionString": "{DATABASE_URL}"
},
"server": {
"logger": {
"level": "{PLT_SERVER_LOGGER_LEVEL}"
},
"port": "{PORT}"
}
}

Platformatic will replace the placeholders in this example with the environment +variables of the same name.

Setting environment variables

If a .env file exists it will automatically be loaded by Platformatic using +dotenv. For example:

.env
PLT_SERVER_LOGGER_LEVEL=info
PORT=8080

The .env file must be located in the same folder as the Platformatic configuration +file or in the current working directory.

Environment variables can also be set directly on the command line, for example:

PLT_SERVER_LOGGER_LEVEL=debug npx platformatic db

Allowed placeholder names

Only placeholder names prefixed with PLT_, or that are in this allow list, will be +dynamically replaced in the configuration file:

  • PORT
  • DATABASE_URL

This restriction is to avoid accidentally exposing system environment variables. +An error will be raised by Platformatic if it finds a configuration placeholder +that isn't allowed.

The default allow list can be extended by passing a --allow-env CLI option with a +comma separated list of strings, for example:

npx platformatic db start --allow-env=HOST,SERVER_LOGGER_LEVEL
# OR
npx platformatic start --allow-env=HOST,SERVER_LOGGER_LEVEL

If --allow-env is passed as an option to the CLI, it will be merged with the +default allow list.

Sample Configuration

This is a bare minimum configuration for Platformatic DB. Uses a local ./db.sqlite SQLite database, with OpenAPI and GraphQL support.

Server will listen to http://127.0.0.1:3042

{
"server": {
"hostname": "127.0.0.1",
"port": "3042"
},
"db": {
"connectionString": "sqlite://./db.sqlite",
"graphiql": true,
"openapi": true,
"graphql": true
}
}
+ + + + \ No newline at end of file diff --git a/docs/0.41.3/reference/db/introduction/index.html b/docs/0.41.3/reference/db/introduction/index.html new file mode 100644 index 00000000000..3873de01b9b --- /dev/null +++ b/docs/0.41.3/reference/db/introduction/index.html @@ -0,0 +1,25 @@ + + + + + +Platformatic DB | Platformatic Open Source Software + + + + + +
+
Version: 0.41.3

Platformatic DB

Platformatic DB is an HTTP server that provides a flexible set of tools for +building robust APIs with Node.js.

For a high level overview of how Platformatic DB works, please reference the +Architecture guide.

info

Platformatic DB is currently in public beta.

Features

info

Get up and running in 2 minutes using our +Quick Start Guide

Supported databases

DatabaseVersion
SQLite3.
PostgreSQL>= 15
MySQL>= 5.7
MariaDB>= 10.11

The required database driver is automatically inferred and loaded based on the +value of the connectionString +configuration setting.

Public beta

Platformatic DB is in public beta. You can use it in production, but it's quite +likely that you'll encounter significant bugs.

If you run into a bug or have a suggestion for improvement, please +raise an issue on GitHub.

+ + + + \ No newline at end of file diff --git a/docs/0.41.3/reference/db/logging/index.html b/docs/0.41.3/reference/db/logging/index.html new file mode 100644 index 00000000000..69258829b46 --- /dev/null +++ b/docs/0.41.3/reference/db/logging/index.html @@ -0,0 +1,25 @@ + + + + + +Logging | Platformatic Open Source Software + + + + + +
+
Version: 0.41.3

Logging

Platformatic DB uses a low overhead logger named Pino +to output structured log messages.

Logger output level

By default the logger output level is set to info, meaning that all log messages +with a level of info or above will be output by the logger. See the +Pino documentation +for details on the supported log levels.

The logger output level can be overridden by adding a logger object to the server configuration settings group:

platformatic.db.json
{
"server": {
"logger": {
"level": "error"
},
...
},
...
}

Log formatting

If you run Platformatic DB in a terminal, where standard out (stdout) +is a TTY:

  • pino-pretty is automatically used +to pretty print the logs and make them easier to read during development.
  • The Platformatic logo is printed (if colors are supported in the terminal emulator)

Example:

$ npx platformatic db start




/////////////
///// /////
/// ///
/// ///
/// ///
&& /// /// &&
&&&&&& /// /// &&&&&&
&&&& /// /// &&&&
&&& /// /// &&&&&&&&&&&&
&&& /// /////// //// && &&&&&
&& /// /////////////// &&&
&&& /// /// &&&
&&& /// // &&
&&& /// &&
&&& /// &&&
&&&& /// &&&
&&&&& /// &&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&
///
///
///
///
///
///


[11:20:33.466] INFO (337606): server listening
url: "http://127.0.0.1:3042"

If stdout is redirected to a non-TTY, the logo is not printed and the logs are +formatted as newline-delimited JSON:

$ npx platformatic db start | head
{"level":30,"time":1665566628973,"pid":338365,"hostname":"darkav2","url":"http://127.0.0.1:3042","msg":"server listening"}
+ + + + \ No newline at end of file diff --git a/docs/0.41.3/reference/db/migrations/index.html b/docs/0.41.3/reference/db/migrations/index.html new file mode 100644 index 00000000000..ebdc42fc6e8 --- /dev/null +++ b/docs/0.41.3/reference/db/migrations/index.html @@ -0,0 +1,17 @@ + + + + + +Migrations | Platformatic Open Source Software + + + + + +
+
Version: 0.41.3

Migrations

It uses Postgrator under the hood to run migrations. Please refer to the Postgrator documentation for guidance on writing migration files.

In brief, you should create a file structure like this

migrations/
|- 001.do.sql
|- 001.undo.sql
|- 002.do.sql
|- 002.undo.sql
|- 003.do.sql
|- 003.undo.sql
|- 004.do.sql
|- 004.undo.sql
|- ... and so on

Postgrator uses a table in your schema, to store which migrations have been already processed, so that only new ones will be applied at every server start.

You can always roll back migrations by specifying which version you would like to roll back to.

Example

$ platformatic db migrations apply --to 002

Will execute 004.undo.sql, 003.undo.sql in this order. If you keep those files in migrations directory, when the server restarts it will execute 003.do.sql and 004.do.sql in this order if the autoApply value is true, or you can run the db migrations apply command.

It's also possible to rollback a single migration with -r:

$ platformatic db migrations apply -r 

How to run migrations

There are two ways to run migrations in Platformatic DB. They can be processed automatically when the server starts if the autoApply value is true, or you can just run the db migrations apply command.

In both cases you have to edit your config file to tell Platformatic DB where are your migration files.

Automatically on server start

To run migrations when Platformatic DB starts, you need to use the config file root property migrations.

There are two options in the "migrations" property

  • dir (required) the directory where the migration files are located. It will be relative to the config file path.
  • autoApply a boolean value that tells Platformatic DB to auto-apply migrations or not (default: false)

Example

{
...
"migrations": {
"dir": "./path/to/migrations/folder",
"autoApply": false
}
}

Manually with the CLI

See documentation about db migrations apply command

In short:

  • be sure to define a correct migrations.dir folder under the config on platformatic.db.json
  • get the MIGRATION_NUMBER (f.e. if the file is named 002.do.sql will be 002)
  • run npx platformatic db migrations apply --to MIGRATION_NUMBER
+ + + + \ No newline at end of file diff --git a/docs/0.41.3/reference/db/plugin/index.html b/docs/0.41.3/reference/db/plugin/index.html new file mode 100644 index 00000000000..8eb9f3937a9 --- /dev/null +++ b/docs/0.41.3/reference/db/plugin/index.html @@ -0,0 +1,19 @@ + + + + + +Plugin | Platformatic Open Source Software + + + + + +
+
Version: 0.41.3

Plugin

If you want to extend Platformatic DB features, it is possible to register a plugin, which will be in the form of a standard Fastify plugin.

The config file will specify where the plugin file is located as the example below:

{
...
"plugins": {
"paths": ["./plugin/index.js"]
}
}

The paths are relative to the config file path.

Once the config file is set up, you can write your plugin to extend Platformatic DB API or write your custom business logic.

You should export an async function which receives the following parameters:

  • app (FastifyInstance) that is the main fastify instance running Platformatic DB
  • opts all the options specified in the config file after path
  • You can always access Platformatic data mapper through app.platformatic property.
info

To make sure that a user has the appropriate set of permissions to perform any action on an entity the context should be passed to the entity mapper operation like this:

app.post('/', async (req, reply) => {
const ctx = req.createPlatformaticCtx()

await app.platformatic.entities.movies.find({
where: { /*...*/ },
ctx
})
})

Check some examples.

Hot Reload

Plugin file is being watched by fs.watch function.

You don't need to reload Platformatic DB server while working on your plugin. Every time you save, the watcher will trigger a reload event and the server will auto-restart and load your updated code.

tip

At this time, on Linux, file watch in subdirectories is not supported due to a Node.js limitation (documented here).

Directories

The path can also be a directory. In that case, the directory will be loaded with @fastify/autoload.

Consider the following directory structure:

├── routes
│ ├── foo
│ │ ├── something.js
│ │ └── bar
│ │ └── baz.js
│ ├── single-plugin
│ │ └── utils.js
│ └── another-plugin.js
└── platformatic.service.json

By default the folder will be added as a prefix to all the routes defined within them. +See the autoload documentation for all the options to customize this behavior.

Multiple plugins

Multiple plugins can be loaded in parallel by specifying an array:

{
...
"plugins": {
"paths": [{
"path": "./plugin/index.js"
}, {
"path": "./routes/"
}]
}
}

fastify.swagger()

TypeScript and autocompletion

If you want to access any of the types provided by Platformatic DB, generate them using the platformatic db types command. +This will create a global.d.ts file that you can now import everywhere, like so:

/// <reference types="./global.d.ts" />

Remember to adjust the path to global.d.ts.

Plugin definition with TypeScript

Here is an example of writing a plugin in TypeScript:

/// <reference types="./global.d.ts" />
import { FastifyInstance, FastifyPluginOptions } from 'fastify'

export default async function (fastify: FastifyInstance, opts: FastifyPluginOptions) {
}

Note that you need to add the "plugins": { "typescript": true } configuration to your platformatic.service.json.

+ + + + \ No newline at end of file diff --git a/docs/0.41.3/reference/db/programmatic/index.html b/docs/0.41.3/reference/db/programmatic/index.html new file mode 100644 index 00000000000..ac83c9d4a65 --- /dev/null +++ b/docs/0.41.3/reference/db/programmatic/index.html @@ -0,0 +1,17 @@ + + + + + +Programmatic API | Platformatic Open Source Software + + + + + +
+
Version: 0.41.3

Programmatic API

It's possible to start an instance of Platformatic DB from JavaScript.

import { buildServer } from '@platformatic/db'

const app = await buildServer('/path/to/platformatic.db.json')

await app.start() // this will start our server

console.log('URL', app.url)

const res = await fetch(app.url)
console.log(await res.json())

// do something

await app.close()

It is also possible to customize the configuration:

import { buildServer } from '@platformatic/db'

const app = await buildServer({
server: {
hostname: '127.0.0.1',
port: 0
},
db: {
connectionString: 'sqlite://test.sqlite'
},
})

await app.start() // this will start our server

console.log('URL', app.url)

const res = await fetch(app.url)
console.log(await res.json())

// do something

await app.close()

For more details on how this is implemented, read Platformatic Service Programmatic API.

API

buildServer(config)

Returns an instance of the restartable application

RestartableApp

.start()

Listen to the hostname/port combination specified in the config.

.restart()

Restart the Fastify application

.close()

Stops the application.

+ + + + \ No newline at end of file diff --git a/docs/0.41.3/reference/db/schema-support/index.html b/docs/0.41.3/reference/db/schema-support/index.html new file mode 100644 index 00000000000..8b22105c075 --- /dev/null +++ b/docs/0.41.3/reference/db/schema-support/index.html @@ -0,0 +1,21 @@ + + + + + +Schema support | Platformatic Open Source Software + + + + + +
+
Version: 0.41.3

Schema support

It's possible to specify the schemas where the tables are located (if the database supports schemas). Platformatic DB will inspect these schemas to create the entities.

Example

CREATE SCHEMA IF NOT EXISTS "test1";
CREATE TABLE IF NOT EXISTS test1.movies (
id INTEGER PRIMARY KEY,
title TEXT NOT NULL
);

CREATE SCHEMA IF NOT EXISTS "test2";
CREATE TABLE IF NOT EXISTS test2.users (
id INTEGER PRIMARY KEY,
title TEXT NOT NULL
);

The schemas must be specified in configuration in the schema section. +Note that if we use schemas and migrations, we must specify the schema in the migrations table as well +(with postgresql, we assume we use the default public schema).

  ...
"db": {
"connectionString": "(...)",
"schema": [
"test1", "test2"
],
"ignore": {
"versions": true
}
},
"migrations": {
"dir": "migrations",
"table": "test1.versions"
},

...

The entity names are then generated in the form schemaName + entityName, in PascalCase (this is necessary to avoid name collisions in case there are tables with the same name in different schemas). So, for instance, for the example above we generate the Test1Movie and Test2User entities.

info

Please pay attention to the entity names when using schema, these are also used to setup authorization rules

+ + + + \ No newline at end of file diff --git a/docs/0.41.3/reference/runtime/configuration/index.html b/docs/0.41.3/reference/runtime/configuration/index.html new file mode 100644 index 00000000000..3cd49ec7d1e --- /dev/null +++ b/docs/0.41.3/reference/runtime/configuration/index.html @@ -0,0 +1,67 @@ + + + + + +Configuration | Platformatic Open Source Software + + + + + +
+
Version: 0.41.3

Configuration

Platformatic Runtime is configured with a configuration file. It supports the +use of environment variables as setting values with configuration placeholders.

Configuration file

If the Platformatic CLI finds a file in the current working directory matching +one of these filenames, it will automatically load it:

  • platformatic.runtime.json
  • platformatic.runtime.json5
  • platformatic.runtime.yml or platformatic.runtime.yaml
  • platformatic.runtime.tml or platformatic.runtime.toml

Alternatively, a --config option with a configuration +filepath can be passed to most platformatic runtime CLI commands.

The configuration examples in this reference use JSON.

Supported formats

FormatExtensions
JSON.json
JSON5.json5
YAML.yml, .yaml
TOML.tml, .toml

Comments are supported by the JSON5, YAML and TOML file formats.

Settings

Configuration settings are organized into the following groups:

Configuration settings containing sensitive data should be set using +configuration placeholders.

The autoload and services settings can be used together, but at least one +of them must be provided. When the configuration file is parsed, autoload +configuration is translated into services configuration.

autoload

The autoload configuration is intended to be used with monorepo applications. +autoload is an object with the following settings:

  • path (required, string) - The path to a directory containing the +microservices to load. In a traditional monorepo application, this directory is +typically named packages.
  • exclude (array of strings) - Child directories inside of path that +should not be processed.
  • mappings (object) - Each microservice is given an ID and is expected +to have a Platformatic configuration file. By default the ID is the +microservice's directory name, and the configuration file is expected to be a +well-known Platformatic configuration file. mappings can be used to override +these default values.
    • id (required, string) - The overridden ID. This becomes the new +microservice ID.
    • config (required, string) - The overridden configuration file name. This is the file that will be used when starting the microservice.

services

services is an array of objects that defines the microservices managed by the +runtime. Each service object supports the following settings:

  • id (required, string) - A unique identifier for the microservice. +When working with the Platformatic Composer, this value corresponds to the id +property of each object in the services section of the config file. When +working with client objects, this corresponds to the optional serviceId +property or the name field in the client's package.json file if a +serviceId is not explicitly provided.
  • path (required, string) - The path to the directory containing +the microservice.
  • config (required, string) - The configuration file used to start +the microservice.

entrypoint

The Platformatic Runtime's entrypoint is a microservice that is exposed +publicly. This value must be the ID of a service defined via the autoload or +services configuration.

hotReload

An optional boolean, defaulting to false, indicating if hot reloading should +be enabled for the runtime. If this value is set to false, it will disable +hot reloading for any microservices managed by the runtime. If this value is +true, hot reloading for individual microservices is managed by the +configuration of that microservice.

danger

While hot reloading is useful for development, it is not recommended for use in +production.

allowCycles

An optional boolean, defaulting to false, indicating if dependency cycles +are allowed between microservices managed by the runtime. When the Platformatic +Runtime parses the provided configuration, it examines the clients of each +microservice, as well as the services of Platformatic Composer applications to +build a dependency graph. A topological sort is performed on this dependency +graph so that each service is started after all of its dependencies have been +started. If there are cycles, the topological sort fails and the Runtime does +not start any applications.

If allowCycles is true, the topological sort is skipped, and the +microservices are started in the order specified in the configuration file.

telemetry

Open Telemetry is optionally supported with these settings:

  • serviceName (required, string) — Name of the service as will be reported in open telemetry. In the runtime case, the name of the services as reported in traces is ${serviceName}-${serviceId}, where serviceId is the id of the service in the runtime.
  • version (string) — Optional version (free form)
  • skip (array). Optional list of operations to skip when exporting telemetry defined object with properties:
    • method: GET, POST, PUT, DELETE, PATCH, HEAD, OPTIONS, TRACE
    • path. e.g.: /documentation/json
  • exporter (object or array) — Exporter configuration. If not defined, the exporter defaults to console. If an array of objects is configured, every object must be a valid exporter object. The exporter object has the following properties:
    • type (string) — Exporter type. Supported values are console, otlp, zipkin and memory (default: console). memory is only supported for testing purposes.
    • options (object) — These options are supported:
      • url (string) — The URL to send the telemetry to. Required for otlp exporter. This has no effect on console and memory exporters.
      • headers (object) — Optional headers to send with the telemetry. This has no effect on console and memory exporters.

Note that OTLP traces can be consumed by different solutions, like Jaeger. Here the full list.

Example

{
"telemetry": {
"serviceName": "test-service",
"exporter": {
"type": "otlp",
"options": {
"url": "http://localhost:4318/v1/traces"
}
}
}
}

Environment variable placeholders

The value for any configuration setting can be replaced with an environment +variable by adding a placeholder in the configuration file, for example +{PLT_ENTRYPOINT}.

All placeholders in a configuration must be available as an environment +variable and must meet the +allowed placeholder name rules.

Setting environment variables

If a .env file exists it will automatically be loaded by Platformatic using +dotenv. For example:

.env
PLT_ENTRYPOINT=service

The .env file must be located in the same folder as the Platformatic +configuration file or in the current working directory.

Environment variables can also be set directly on the command line, for example:

PLT_ENTRYPOINT=service npx platformatic runtime

Allowed placeholder names

Only placeholder names prefixed with PLT_, or that are in this allow list, +will be dynamically replaced in the configuration file:

  • PORT
  • DATABASE_URL

This restriction is to avoid accidentally exposing system environment variables. +An error will be raised by Platformatic if it finds a configuration placeholder +that isn't allowed.

The default allow list can be extended by passing a --allow-env CLI option +with a comma separated list of strings, for example:

npx platformatic runtime --allow-env=HOST,SERVER_LOGGER_LEVEL

If --allow-env is passed as an option to the CLI, it will be merged with the +default allow list.

+ + + + \ No newline at end of file diff --git a/docs/0.41.3/reference/runtime/introduction/index.html b/docs/0.41.3/reference/runtime/introduction/index.html new file mode 100644 index 00000000000..827424819fe --- /dev/null +++ b/docs/0.41.3/reference/runtime/introduction/index.html @@ -0,0 +1,33 @@ + + + + + +Platformatic Runtime | Platformatic Open Source Software + + + + + +
+
Version: 0.41.3

Platformatic Runtime

Platformatic Runtime is an environment for running multiple Platformatic +microservices as a single monolithic deployment unit.

info

Platformatic Runtime is currently in public beta.

Features

Public beta

Platformatic Runtime is in public beta. You can use it in production, but it's quite +likely that you'll encounter significant bugs.

If you run into a bug or have a suggestion for improvement, please +raise an issue on GitHub.

Standalone usage

If you're only interested in the features available in Platformatic Runtime, you can replace platformatic with @platformatic/runtime in the dependencies of your package.json, so that you'll import fewer deps.

Example configuration file

The following configuration file can be used to start a new Platformatic +Runtime project. For more details on the configuration file, see the +configuration documentation.

{
"$schema": "https://platformatic.dev/schemas/v0.26.0/runtime",
"autoload": {
"path": "./packages",
"exclude": ["docs"]
},
"entrypoint": "entrypointApp"
}

TypeScript Compilation

Platformatic Runtime streamlines the compilation of all services built on TypeScript with the command +plt runtime compile. The TypeScript compiler (tsc) is required to be installed separately.

Interservice communication

The Platformatic Runtime allows multiple microservice applications to run +within a single process. Only the entrypoint binds to an operating system +port and can be reached from outside of the runtime.

Within the runtime, all interservice communication happens by injecting HTTP +requests into the running servers, without binding them to ports. This injection +is handled by +fastify-undici-dispatcher.

Each microservice is assigned an internal domain name based on its unique ID. +For example, a microservice with the ID awesome is given the internal domain +of http://awesome.plt.local. The fastify-undici-dispatcher module maps that +domain to the Fastify server running the awesome microservice. Any Node.js +APIs based on Undici, such as fetch(), will then automatically route requests +addressed to awesome.plt.local to the corresponding Fastify server.

+ + + + \ No newline at end of file diff --git a/docs/0.41.3/reference/runtime/programmatic/index.html b/docs/0.41.3/reference/runtime/programmatic/index.html new file mode 100644 index 00000000000..c27438b3ff5 --- /dev/null +++ b/docs/0.41.3/reference/runtime/programmatic/index.html @@ -0,0 +1,28 @@ + + + + + +Programmatic API | Platformatic Open Source Software + + + + + +
+
Version: 0.41.3

Programmatic API

In many cases it's useful to start Platformatic applications using an API +instead of the command line. The @platformatic/runtime API makes it simple to +work with different application types (e.g. service, db, composer and runtime) without +needing to know the application type a priori.

buildServer()

The buildServer function creates a server from a provided configuration +object or configuration filename. +The config can be of either Platformatic Service, Platformatic DB, +Platformatic Composer or any other application built on top of +Platformatic Service.

import { buildServer } from '@platformatic/runtime'

const app = await buildServer('path/to/platformatic.runtime.json')
const entrypointUrl = await app.start()

// Make a request to the entrypoint.
const res = await fetch(entrypointUrl)
console.log(await res.json())

// Do other interesting things.

await app.close()

It is also possible to customize the configuration:

import { buildServer } from '@platformatic/runtime'

const config = {
// $schema: 'https://platformatic.dev/schemas/v0.39.0/runtime',
// $schema: 'https://platformatic.dev/schemas/v0.39.0/service',
// $schema: 'https://platformatic.dev/schemas/v0.39.0/db',
// $schema: 'https://platformatic.dev/schemas/v0.39.0/composer'
...
}
const app = await buildServer(config)

await app.start()

loadConfig()

The loadConfig function is used to read and parse a configuration file for +an arbitrary Platformatic application.

import { loadConfig } from '@platformatic/runtime'

// Read the config based on command line arguments. loadConfig() will detect
// the application type.
const config = await loadConfig({}, ['-c', '/path/to/platformatic.config.json'])

// Read the config based on command line arguments. The application type can
// be provided explicitly.
const config = await loadConfig(
{},
['-c', '/path/to/platformatic.config.json']
)

// Default config can be specified.
const config = await loadConfig(
{},
['-c', '/path/to/platformatic.config.json'],
{ key: 'value' }
)

start()

The start function loads a configuration, builds a server, and starts the +server. However, the server is not returned.

import { start } from '@platformatic/runtime'

await start(['-c', '/path/to/platformatic.config.json'])

startCommand()

The startCommand function is similar to start. However, if an exception +occurs, startCommand logs the error and exits the process. This is different +from start, which throws the exception.

import { startCommand } from '@platformatic/runtime'

await startCommand(['-c', '/path/to/platformatic.config.json'])
+ + + + \ No newline at end of file diff --git a/docs/0.41.3/reference/service/configuration/index.html b/docs/0.41.3/reference/service/configuration/index.html new file mode 100644 index 00000000000..b2c1f085974 --- /dev/null +++ b/docs/0.41.3/reference/service/configuration/index.html @@ -0,0 +1,38 @@ + + + + + +Configuration | Platformatic Open Source Software + + + + + +
+
Version: 0.41.3

Configuration

Platformatic Service is configured with a configuration file. It supports the use +of environment variables as setting values with configuration placeholders.

Configuration file

If the Platformatic CLI finds a file in the current working directory matching +one of these filenames, it will automatically load it:

  • platformatic.service.json
  • platformatic.service.json5
  • platformatic.service.yml or platformatic.service.yaml
  • platformatic.service.tml or platformatic.service.toml

Alternatively, a --config option with a configuration +filepath can be passed to most platformatic service CLI commands.

The configuration examples in this reference use JSON.

Supported formats

FormatExtensions
JSON.json
JSON5.json5
YAML.yml, .yaml
TOML.tml, .toml

Comments are supported by the JSON5, YAML and TOML file formats.

Settings

Configuration settings are organised into the following groups:

Sensitive configuration settings, such as a database connection URL that contains +a password, should be set using configuration placeholders.

server

A required object with the following settings:

  • hostname (required, string) — Hostname where Platformatic Service server will listen for connections.

  • port (required, number) — Port where Platformatic Service server will listen for connections.

  • healthCheck (boolean or object) — Enables the health check endpoint.

    • Powered by @fastify/under-pressure.
    • The value can be an object, used to specify the interval between checks in milliseconds (default: 5000)

    Example

    {
    "server": {
    ...
    "healthCheck": {
    "interval": 2000
    }
    }
    }
  • cors (object) — Configuration for Cross-Origin Resource Sharing (CORS) headers.

    • All options will be passed to the @fastify/cors plugin. In order to specify a RegExp object, you can pass { regexp: 'yourregexp' }, +it will be automatically converted.
  • logger (object) -- the logger configuration.

  • pluginTimeout (integer) -- the number of milliseconds to wait for a Fastify plugin to load, see the fastify docs for more details.

  • https (object) - Configuration for HTTPS supporting the following options.

    • key (required, string, object, or array) - If key is a string, it specifies the private key to be used. If key is an object, it must have a path property specifying the private key file. Multiple keys are supported by passing an array of keys.
    • cert (required, string, object, or array) - If cert is a string, it specifies the certificate to be used. If cert is an object, it must have a path property specifying the certificate file. Multiple certificates are supported by passing an array of keys.

metrics

Configuration for a Prometheus server that will export monitoring metrics +for the current server instance. It uses fastify-metrics +under the hood.

This setting can be a boolean or an object. If set to true the Prometheus server will listen on http://0.0.0.0:9090.

Supported object properties:

  • hostname (string) — The hostname where Prometheus server will listen for connections.
  • port (number) — The port where Prometheus server will listen for connections.
  • auth (object) — Basic Auth configuration. username and password are required here +(use environment variables).

plugins

An optional object that defines the plugins loaded by Platformatic Service.

  • paths (required, array): an array of paths (string) +or an array of objects composed as follows,
    • path (string): Relative path to plugin's entry point.
    • options (object): Optional plugin options.
    • encapsulate (boolean): if the path is a folder, it instructs Platformatic not to encapsulate those plugins.
    • maxDepth (integer): if the path is a folder, it limits the depth to load the content from.
  • typescript (boolean or object): enable TypeScript compilation. A tsconfig.json file is required in the same folder.

Example

{
"plugins": {
"paths": [{
"path": "./my-plugin.js",
"options": {
"foo": "bar"
}
}]
}
}

typescript compilation options

The typescript can also be an object to customize the compilation. Here are the supported options:

  • enabled (boolean): enables compilation
  • tsConfig (string): path to the tsconfig.json file relative to the configuration
  • outDir (string): the output directory of tsconfig.json, in case tsconfig.json is not available +and enabled is set to false (production build)
  • flags (array of string): flags to be passed to tsc. Overrides tsConfig. +

Example:

{
"plugins": {
"paths": [{
"path": "./my-plugin.js",
"options": {
"foo": "bar"
}
}],
"typescript": {
"enabled": false,
"tsConfig": "./path/to/tsconfig.json",
"outDir": "dist"
}
}
}

watch

Disable watching for file changes if set to false. It can also be customized with the following options:

  • ignore (string[], default: null): List of glob patterns to ignore when watching for changes. If null or not specified, ignore rule is not applied. Ignore option doesn't work for typescript files.

  • allow (string[], default: ['*.js', '**/*.js']): List of glob patterns to allow when watching for changes. If null or not specified, allow rule is not applied. Allow option doesn't work for typescript files.

    Example

    {
    "watch": {
    "ignore": ["*.mjs", "**/*.mjs"],
    "allow": ["my-plugin.js", "plugins/*.js"]
    }
    }

service

Configure @platformatic/service specific settings such as graphql or openapi:

  • graphql (boolean or object, default: false) — Controls the GraphQL API interface, with optional GraphiQL UI.

    Examples

    Enables GraphQL support

    {
    "service": {
    "graphql": true
    }
    }

    Enables GraphQL support with GraphiQL

    {
    "service": {
    "graphql": {
    "graphiql": true
    }
    }
    }
  • openapi (boolean or object, default: false) — Enables OpenAPI REST support.

    • If value is an object, all OpenAPI v3 allowed properties can be passed. Also a prefix property can be passed to set the OpenAPI prefix.
    • Platformatic Service uses @fastify/swagger under the hood to manage this configuration.

    Examples

    Enables OpenAPI

    {
    "service": {
    ...
    "openapi": true
    }
    }

    Enables OpenAPI with prefix

    {
    "service": {
    "openapi": {
    "prefix": "/api"
    }
    }
    }

    Enables OpenAPI with options

    {
    "service": {
    "openapi": {
    "info": {
    "title": "Platformatic Service",
    "description": "Exposing a SQL database as REST"
    }
    }
    }
    }

telemetry

Open Telemetry is optionally supported with these settings:

  • serviceName (required, string) — Name of the service as will be reported in open telemetry.
  • version (string) — Optional version (free form)
  • skip (array). Optional list of operations to skip when exporting telemetry defined object with properties:
    • method: GET, POST, PUT, DELETE, PATCH, HEAD, OPTIONS, TRACE
    • path. e.g.: /documentation/json
  • exporter (object or array) — Exporter configuration. If not defined, the exporter defaults to console. If an array of objects is configured, every object must be a valid exporter object. The exporter object has the following properties:
    • type (string) — Exporter type. Supported values are console, otlp, zipkin and memory (default: console). memory is only supported for testing purposes.
    • options (object) — These options are supported:
      • url (string) — The URL to send the telemetry to. Required for otlp exporter. This has no effect on console and memory exporters.
      • headers (object) — Optional headers to send with the telemetry. This has no effect on console and memory exporters.

Note that OTLP traces can be consumed by different solutions, like Jaeger. Here the full list.

Example

{
"telemetry": {
"serviceName": "test-service",
"exporter": {
"type": "otlp",
"options": {
"url": "http://localhost:4318/v1/traces"
}
}
}
}

Environment variable placeholders

The value for any configuration setting can be replaced with an environment variable +by adding a placeholder in the configuration file, for example {PLT_SERVER_LOGGER_LEVEL}.

All placeholders in a configuration must be available as an environment variable +and must meet the allowed placeholder name rules.

Example

platformatic.service.json
{
"server": {
"port": "{PORT}"
}
}

Platformatic will replace the placeholders in this example with the environment +variables of the same name.

Setting environment variables

If a .env file exists it will automatically be loaded by Platformatic using +dotenv. For example:

.env
PLT_SERVER_LOGGER_LEVEL=info
PORT=8080

The .env file must be located in the same folder as the Platformatic configuration +file or in the current working directory.

Environment variables can also be set directly on the command line, for example:

PLT_SERVER_LOGGER_LEVEL=debug npx platformatic service

Allowed placeholder names

Only placeholder names prefixed with PLT_, or that are in this allow list, will be +dynamically replaced in the configuration file:

  • PORT
  • DATABASE_URL

This restriction is to avoid accidentally exposing system environment variables. +An error will be raised by Platformatic if it finds a configuration placeholder +that isn't allowed.

The default allow list can be extended by passing a --allow-env CLI option with a +comma separated list of strings, for example:

npx platformatic service --allow-env=HOST,SERVER_LOGGER_LEVEL

If --allow-env is passed as an option to the CLI, it will be merged with the +default allow list.

+ + + + \ No newline at end of file diff --git a/docs/0.41.3/reference/service/introduction/index.html b/docs/0.41.3/reference/service/introduction/index.html new file mode 100644 index 00000000000..eed8e9419e6 --- /dev/null +++ b/docs/0.41.3/reference/service/introduction/index.html @@ -0,0 +1,21 @@ + + + + + +Platformatic Service | Platformatic Open Source Software + + + + + +
+
Version: 0.41.3

Platformatic Service

Platformatic Service is an HTTP server that provides developer tools for +building robust APIs with Node.js.

For a high level overview of how Platformatic DB works, please reference the +Architecture guide.

info

Platformatic Service is currently in public beta.

Features

Public beta

Platformatic Service is in public beta. You can use it in production, but it's quite +likely that you'll encounter significant bugs.

If you run into a bug or have a suggestion for improvement, please +raise an issue on GitHub.

Standalone usage

If you're only interested in the features available in Platformatic Service, you can simply replace platformatic with @platformatic/service in the dependencies of your package.json, so that you'll import fewer dependencies.

You can use the plt-service command, it's the equivalent of plt service.

+ + + + \ No newline at end of file diff --git a/docs/0.41.3/reference/service/plugin/index.html b/docs/0.41.3/reference/service/plugin/index.html new file mode 100644 index 00000000000..04db576d4aa --- /dev/null +++ b/docs/0.41.3/reference/service/plugin/index.html @@ -0,0 +1,21 @@ + + + + + +Plugin | Platformatic Open Source Software + + + + + +
+
Version: 0.41.3

Plugin

If you want to add features to a service, you will need to register a plugin, which will be in the form of a standard Fastify plugin.

The config file will specify where the plugin file is located as the example below:

{
...
"plugins": {
"paths": ["./plugin/index.js"]
}
}

The path is relative to the config file path.

You should export an async function which receives the following parameters:

  • app (FastifyInstance) that is the main fastify instance
  • opts all the options specified in the config file after path

Hot Reload

Plugin file is being watched by fs.watch function.

You don't need to reload Platformatic Service server while working on your plugin. Every time you save, the watcher will trigger a reload event and the server will auto-restart and load your updated code.

tip

At this time, on Linux, file watch in subdirectories is not supported due to a Node.js limitation (documented here).

Directories

The path can also be a directory. In that case, the directory will be loaded with @fastify/autoload.

Consider the following directory structure:

├── routes
│ ├── foo
│ │ ├── something.js
│ │ └── bar
│ │ └── baz.js
│ ├── single-plugin
│ │ └── utils.js
│ └── another-plugin.js
└── platformatic.service.json

By default the folder will be added as a prefix to all the routes defined within them. +See the autoload documentation for all the options to customize this behavior.

Multiple plugins

Multiple plugins can be loaded in parallel by specifying an array:

{
...
"plugins": {
"paths": [{
"path": "./plugin/index.js"
}, {
"path": "./routes/"
}]
}
}

TypeScript and Autocompletion

In order to provide the correct typings of the features added by Platformatic Service to your Fastify instance, +add the following at the top of your files:

/// <reference types="@platformatic/service" />

Plugin definition with TypeScript

Here is an example of writing a plugin in TypeScript:

/// <reference types="@platformatic/service" />
import { FastifyInstance, FastifyPluginOptions } from 'fastify'

export default async function (fastify: FastifyInstance, opts: FastifyPluginOptions) {
}

Note that you need to add the "typescript": true configuration to your platformatic.service.json.

Loading compiled files

Setting "typescript": false but including a tsconfig.json with an outDir +option, will instruct Platformatic Service to try loading your plugins from that folder instead. +This setup is needed to support pre-compiled sources to reduce cold start time during deployment.

+ + + + \ No newline at end of file diff --git a/docs/0.41.3/reference/service/programmatic/index.html b/docs/0.41.3/reference/service/programmatic/index.html new file mode 100644 index 00000000000..c01fc931b28 --- /dev/null +++ b/docs/0.41.3/reference/service/programmatic/index.html @@ -0,0 +1,19 @@ + + + + + +Programmatic API | Platformatic Open Source Software + + + + + +
+
Version: 0.41.3

Programmatic API

In many cases it's useful to start Platformatic Service using an API instead of +command line, e.g. in tests we want to start and stop our server.

The buildServer function allows that:

import { buildServer } from '@platformatic/service'

const app = await buildServer('path/to/platformatic.service.json')

await app.start()

const res = await fetch(app.url)
console.log(await res.json())

// do something

await app.close()

It is also possible to customize the configuration:

import { buildServer } from '@platformatic/service'

const app = await buildServer({
server: {
hostname: '127.0.0.1',
port: 0
}
})

await app.start()

const res = await fetch(app.url)
console.log(await res.json())

// do something

await app.close()

Creating a reusable application on top of Platformatic Service

Platformatic DB is built on top of Platformatic Service. +If you want to build a similar kind of tool, follow this example:

import { buildServer, schema, platformaticService } from '@platformatic/service'

async function myPlugin (app, opts) {
// app.platformatic.configManager contains an instance of the ConfigManager
console.log(app.platformatic.configManager.current)

await platformaticService(app, opts)
}

// break Fastify encapsulation
myPlugin[Symbol.for('skip-override')] = true
myPlugin.configType = 'myPlugin'

// This is the schema for this reusable application configuration file,
// customize at will but retain the base properties of the schema from
// @platformatic/service
myPlugin.schema = schema

// The configuration of the ConfigManager
myPlugin.configManagerConfig = {
schema: myPlugin.schema,
envWhitelist: ['PORT', 'HOSTNAME'],
allowToWatch: ['.env'],
schemaOptions: {
useDefaults: true,
coerceTypes: true,
allErrors: true,
strict: false
},
async transformConfig () {
console.log(this.current) // this is the current config

// In this method you can alter the configuration before the application
// is started. It's useful to apply some defaults that cannot be derived
// inside the schema, such as resolving paths.
}
}


const server = await buildServer('path/to/config.json', myPlugin)

await server.start()

const res = await fetch(server.listeningOrigin)
console.log(await res.json())

// do something

await server.close()
+ + + + \ No newline at end of file diff --git a/docs/0.41.3/reference/sql-events/fastify-plugin/index.html b/docs/0.41.3/reference/sql-events/fastify-plugin/index.html new file mode 100644 index 00000000000..f41952cd5a7 --- /dev/null +++ b/docs/0.41.3/reference/sql-events/fastify-plugin/index.html @@ -0,0 +1,19 @@ + + + + + +Fastify Plugin | Platformatic Open Source Software + + + + + +
+
Version: 0.41.3

Fastify Plugin

The @platformatic/sql-events package exports a Fastify plugin that can be used out-of the box in a server application. +It requires that @platformatic/sql-mapper is registered before it.

The plugin has the following options:

The plugin adds the following properties to the app.platformatic object:

  • mq — an instance of mqemitter
  • subscribe(topics) — a method to create a node Readable +that will contain the events emitted by those topics.

Each entity of app.platformatic.entities will be augmented with two functions:

  • entity.getPublishTopic({ ctx, data, action })
  • entity.getSubscriptionTopic({ ctx, action })

Where ctx is the GraphQL Context, data is the object that will be emitted and action is either save or delete.

Usage

'use strict'

const Fastify = require('fastify')
const mapper = require('@platformatic/sql-mapper')
const events = require('@platformatic/sql-events')

async function main() {
const app = Fastify({
logger: {
level: 'info'
}
})
app.register(mapper.plugin, {
connectionString: 'postgres://postgres:postgres@127.0.0.1/postgres'
})

app.register(events)

// setup your routes


await app.listen({ port: 3333 })
}

main()
+ + + + \ No newline at end of file diff --git a/docs/0.41.3/reference/sql-events/introduction/index.html b/docs/0.41.3/reference/sql-events/introduction/index.html new file mode 100644 index 00000000000..4b0be5ed639 --- /dev/null +++ b/docs/0.41.3/reference/sql-events/introduction/index.html @@ -0,0 +1,21 @@ + + + + + +Introduction to the sql-events module | Platformatic Open Source Software + + + + + +
+
Version: 0.41.3

Introduction to the sql-events module

The Platformatic DB sql-events uses mqemitter to publish events when entities are saved and deleted.

These events are useful to distribute updates to clients, e.g. via WebSocket, Server-Sent Events, or GraphQL Subscriptions. +When subscribing and using a multi-process system with a broker like Redis, a subscribed topic will receive the data from all +the other processes.

They are not the right choice for executing some code whenever an entity is created, modified or deleted, in that case +use @platformatic/sql-mapper hooks.

Install

You can use it together with @platformatic/sql-mapper.

npm i @platformatic/sql-mapper @platformatic/sql-events

Usage

const { connect } = require('@platformatic/sql-mapper')
const { setupEmitter } = require('@platformatic/sql-events')
const { pino } = require('pino')

const log = pino()

async function onDatabaseLoad (db, sql) {
await db.query(sql`CREATE TABLE pages (
id SERIAL PRIMARY KEY,
title VARCHAR(255) NOT NULL
);`)
}
const connectionString = 'postgres://postgres:postgres@localhost:5432/postgres'
const mapper = await connect({
connectionString,
log,
onDatabaseLoad,
ignore: {},
hooks: {
Page: {
find: async function(_find, opts) {
console.log('hook called');
return await _find(opts)
}
}
}
})

setupEmitter({ mapper, log })

const pageEntity = mapper.entities.page

const queue = await mapper.subscribe([
pageEntity.getSubscriptionTopic({ action: 'save' }),
pageEntity.getSubscriptionTopic({ action: 'delete' })
])

const page = await pageEntity.save({
input: { title: 'fourth page' }
})

const page2 = await pageEntity.save({
input: {
id: page.id,
title: 'fifth page'
}
})

await pageEntity.delete({
where: {
id: {
eq: page.id
}
},
fields: ['id', 'title']
})

for await (const ev of queue) {
console.log(ev)
if (expected.length === 0) {
break
}
}

process.exit(0)

API

The setupEmitter function has the following options:

The setupEmitter functions adds the following properties to the mapper object:

  • mq — an instance of mqemitter
  • subscribe(topics) — a method to create a node Readable +that will contain the events emitted by those topics.

Each entity of app.platformatic.entities will be augmented with two functions:

  • entity.getPublishTopic({ ctx, data, action })
  • entity.getSubscriptionTopic({ ctx, action })

Where ctx is the GraphQL Context, data is the object that will be emitted and action is either save or delete.

+ + + + \ No newline at end of file diff --git a/docs/0.41.3/reference/sql-graphql/ignore/index.html b/docs/0.41.3/reference/sql-graphql/ignore/index.html new file mode 100644 index 00000000000..4364f278439 --- /dev/null +++ b/docs/0.41.3/reference/sql-graphql/ignore/index.html @@ -0,0 +1,17 @@ + + + + + +Ignoring types and fields | Platformatic Open Source Software + + + + + + + + + + \ No newline at end of file diff --git a/docs/0.41.3/reference/sql-graphql/introduction/index.html b/docs/0.41.3/reference/sql-graphql/introduction/index.html new file mode 100644 index 00000000000..db465af6eda --- /dev/null +++ b/docs/0.41.3/reference/sql-graphql/introduction/index.html @@ -0,0 +1,21 @@ + + + + + +Introduction to the GraphQL API | Platformatic Open Source Software + + + + + +
+
Version: 0.41.3

Introduction to the GraphQL API

The Platformatic DB GraphQL plugin starts a GraphQL server and makes it available +via a /graphql endpoint. This endpoint is automatically ready to run queries and +mutations against your entities. This functionality is powered by +Mercurius.

GraphiQL

The GraphiQL web UI is integrated into +Platformatic DB. To enable it you can pass an option to the sql-graphql plugin:

app.register(graphqlPlugin, { graphiql: true })

The GraphiQL interface is made available under the /graphiql path.

+ + + + \ No newline at end of file diff --git a/docs/0.41.3/reference/sql-graphql/many-to-many/index.html b/docs/0.41.3/reference/sql-graphql/many-to-many/index.html new file mode 100644 index 00000000000..7b6bf96c72a --- /dev/null +++ b/docs/0.41.3/reference/sql-graphql/many-to-many/index.html @@ -0,0 +1,20 @@ + + + + + +Many To Many Relationship | Platformatic Open Source Software + + + + + +
+
Version: 0.41.3

Many To Many Relationship

Many-to-Many relationship lets you relate each row in one table to many rows in +another table and vice versa.

Many-to-many relationship are implemented in SQL via a "join table", a table whose primary key +is composed by the identifier of the two parts of the many-to-many relationship.

Platformatic DB fully supports many-to-many relationships on all supported databases.

Example

Consider the following schema (SQLite):

CREATE TABLE pages (
id INTEGER PRIMARY KEY,
the_title VARCHAR(42)
);

CREATE TABLE users (
id INTEGER PRIMARY KEY,
username VARCHAR(255) NOT NULL
);

CREATE TABLE editors (
page_id INTEGER NOT NULL,
user_id INTEGER NOT NULL,
role VARCHAR(255) NOT NULL,
CONSTRAINT fk_editor_pages FOREIGN KEY (page_id) REFERENCES pages(id),
CONSTRAINT fk_editor_users FOREIGN KEY (user_id) REFERENCES users(id),
PRIMARY KEY (page_id, user_id)
);

The table editors is a "join table" between users and pages. +Given this schema, you could issue queries like:

query {
editors(orderBy: { field: role, direction: DESC }) {
user {
id
username
}
page {
id
theTitle
}
role
}
}

Mutation works exactly the same as before:

mutation {
saveEditor(input: { userId: "1", pageId: "1", role: "captain" }) {
user {
id
username
}
page {
id
theTitle
}
role
}
}
+ + + + \ No newline at end of file diff --git a/docs/0.41.3/reference/sql-graphql/mutations/index.html b/docs/0.41.3/reference/sql-graphql/mutations/index.html new file mode 100644 index 00000000000..492410e3be9 --- /dev/null +++ b/docs/0.41.3/reference/sql-graphql/mutations/index.html @@ -0,0 +1,20 @@ + + + + + +Mutations | Platformatic Open Source Software + + + + + +
+
Version: 0.41.3

Mutations

When the GraphQL plugin is loaded, some mutations are automatically added to +the GraphQL schema.

save[ENTITY]

Saves a new entity to the database or updates an existing entity. +This actually behaves as an upsert, allowing both behaviours depending on the presence of the primary key field.

Example

'use strict'

const Fastify = require('fastify')
const graphqlPlugin = require('@platformatic/sql-graphql')
const sqlMapper = require('@platformatic/sql-mapper')

async function main() {
const app = Fastify({
logger: {
level: 'info'
}
})
app.register(sqlMapper, {
connectionString: 'postgres://postgres:postgres@127.0.0.1/postgres',
log: logger,
})
app.register(graphqlPlugin, {
graphiql: true
})
const res = await app.inject({
method: 'POST',
url: '/graphql',
body: {
query: `
mutation {
savePage(input: { id: 3 title: "Platformatic is cool!" }) {
id
title
}
}
`
}
})
const result = await res.json()
console.log(result.data) // { savePage: { id: '3', title: 'Platformatic is cool!' } }
await app.close()
}

main()

insert[ENTITY]

Inserts a new entity in the database.

Example

'use strict'

const Fastify = require('fastify')
const graphqlPlugin = require('@platformatic/sql-graphql')
const sqlMapper = require('@platformatic/sql-mapper')

async function main() {
const app = Fastify({
logger: {
level: 'info'
}
})
app.register(sqlMapper, {
connectionString: 'postgres://postgres:postgres@127.0.0.1/postgres',
log: logger,
})
app.register(graphqlPlugin, {
graphiql: true
})
const res = await app.inject({
method: 'POST',
url: '/graphql',
body: {
query: `
mutation {
savePage(input: { title: "Platformatic is cool!" }) {
id
title
}
}
`
}
})
const result = await res.json()
console.log(result.data) // { savePage: { id: '4', title: 'Platformatic is cool!' } }
await app.close()
}

main()

delete[ENTITIES]

Deletes one or more entities from the database, based on the where clause +passed as an input to the mutation.

Example

'use strict'

const Fastify = require('fastify')
const graphqlPlugin = require('@platformatic/sql-graphql')
const sqlMapper = require('@platformatic/sql-mapper')

async function main() {
const app = Fastify({
logger: {
level: 'info'
}
})
app.register(sqlMapper, {
connectionString: 'postgres://postgres:postgres@127.0.0.1/postgres',
log: logger,
})
app.register(graphqlPlugin, {
graphiql: true
})
const res = await app.inject({
method: 'POST',
url: '/graphql',
body: {
query: `
mutation {
deletePages(where: { id: { eq: "3" } }) {
id
title
}
}
`
}
})
const result = await res.json()
console.log(result.data) // { deletePages: [ { id: '3', title: 'Platformatic is cool!' } ] }
await app.close()
}

main()
+ + + + \ No newline at end of file diff --git a/docs/0.41.3/reference/sql-graphql/queries/index.html b/docs/0.41.3/reference/sql-graphql/queries/index.html new file mode 100644 index 00000000000..a3c5f0c7d30 --- /dev/null +++ b/docs/0.41.3/reference/sql-graphql/queries/index.html @@ -0,0 +1,21 @@ + + + + + +Queries | Platformatic Open Source Software + + + + + +
+
Version: 0.41.3

Queries

A GraphQL query is automatically added to the GraphQL schema for each database +table, along with a complete mapping for all table fields.

Example

'use strict'

const Fastify = require('fastify')
const graphqlPlugin = require('@platformatic/sql-graphql')
const sqlMapper = require('@platformatic/sql-mapper')
async function main() {
const app = Fastify({
logger: {
level: 'info'
}
})
app.register(sqlMapper, {
connectionString: 'postgres://postgres:postgres@127.0.0.1/postgres'
})
app.register(graphqlPlugin, {
graphiql: true
})
const res = await app.inject({
method: 'POST',
url: '/graphql',
body: {
query: `
query{
pages{
id,
title
}
}
`
}
})
const result = await res.json()
console.log(result.data)
await app.close()
}
main()

Advanced Queries

The following additional queries are added to the GraphQL schema for each entity:

get[ENTITY]by[PRIMARY_KEY]

If you have a table pages with the field id as the primary key, you can run +a query called getPageById.

Example

...
const res = await app.inject({
method: 'POST',
url: '/graphql',
body: {
query: `
query{
getPageById(id: 3) {
id,
title
}
}
`
}
})
const result = await res.json()
console.log(result.data) // { getPageById: { id: '3', title: 'A fiction' } }

count[ENTITIES]

...
const res = await app.inject({
method: 'POST',
url: '/graphql',
body: {
query: `
query {
countPages {
total
}
}
`
}
})
const result = await res.json()
console.log(result.data) // { countPages: { total: 17 } }

Pagination

Platformatic DB supports pagination of results through the input parameters: limit and offset

Example

{
users(limit:5, offset: 10) {
name
}
}

It returns 5 users starting from position 10.

Limit

By default a limit value (10) is applied to each request.

Clients can override this behavior by passing a value. +In this case the server validates the input and an error is returned if it exceeds the max accepted value (100).

Limit's values can be customized through configuration:

{
...
"db": {
...
"limit": {
"default": 50,
"max": 1000
}
}
}

Limit only accepts values >= 0. Otherwise an error is returned.

Offset

By default offset is not applied to the request. +Clients can override this behavior by passing a value.

Offset only accepts values >= 0. Otherwise an error is returned.

+ + + + \ No newline at end of file diff --git a/docs/0.41.3/reference/sql-graphql/subscriptions/index.html b/docs/0.41.3/reference/sql-graphql/subscriptions/index.html new file mode 100644 index 00000000000..9da696fbce4 --- /dev/null +++ b/docs/0.41.3/reference/sql-graphql/subscriptions/index.html @@ -0,0 +1,19 @@ + + + + + +Subscription | Platformatic Open Source Software + + + + + +
+
Version: 0.41.3

Subscription

When the GraphQL plugin is loaded, some subscriptions are automatically added to +the GraphQL schema if the @platformatic/sql-events plugin has been previously registered.

It's possible to avoid creating the subscriptions for a given entity by adding the subscriptionIgnore config, +like so: subscriptionIgnore: ['page'].

[ENTITY]Saved

Published whenever an entity is saved, e.g. when the mutation insert[ENTITY] or save[ENTITY] are called.

[ENTITY]Deleted

Published whenever an entity is deleted, e.g. when the mutation delete[ENTITY] is called.

+ + + + \ No newline at end of file diff --git a/docs/0.41.3/reference/sql-mapper/entities/api/index.html b/docs/0.41.3/reference/sql-mapper/entities/api/index.html new file mode 100644 index 00000000000..fffcd30120e --- /dev/null +++ b/docs/0.41.3/reference/sql-mapper/entities/api/index.html @@ -0,0 +1,18 @@ + + + + + +API | Platformatic Open Source Software + + + + + +
+
Version: 0.41.3

API

A set of operation methods are available on each entity:

Returned fields

The entity operation methods accept a fields option that can specify an array of field names to be returned. If not specified, all fields will be returned.

Where clause

The entity operation methods accept a where option to allow limiting of the database rows that will be affected by the operation.

The where object's key is the field you want to check, the value is a key/value map where the key is an operator (see the table below) and the value is the value you want to run the operator against.

Platformatic operatorSQL operator
eq'='
in'IN'
nin'NOT IN'
neq'<>'
gt'>'
gte'>='
lt'<'
lte'<='
like'LIKE'

Examples

Selects row with id = 1

{
...
"where": {
id: {
eq: 1
}
}
}

Select all rows with id less than 100

{
...
"where": {
id: {
lt: 100
}
}
}

Select all rows with id 1, 3, 5 or 7

{
...
"where": {
id: {
in: [1, 3, 5, 7]
}
}
}

Where clause operations are by default combined with the AND operator. To combine them with the OR operator, use the or key.

Select all rows with id 1 or 3

{
...
"where": {
or: [
{
id: {
eq: 1
}
},
{
id: {
eq: 3
}
}
]
}
}

Select all rows with id 1 or 3 and title like 'foo%'

{
...
"where": {
or: [
{
id: {
eq: 1
}
},
{
id: {
eq: 3
}
}
],
title: {
like: 'foo%'
}
}
}

Reference

find

Retrieve data for an entity from the database.

Options

NameTypeDescription
fieldsArray of stringList of fields to be returned for each object
whereObjectWhere clause 🔗
orderByArray of ObjectObject like { field: 'counter', direction: 'ASC' }
limitNumberLimits the number of returned elements
offsetNumberThe offset to start looking for rows from

Usage

'use strict'

const { connect } = require('@platformatic/sql-mapper')
const { pino } = require('pino')
const pretty = require('pino-pretty')
const logger = pino(pretty())

async function main() {
const pgConnectionString = 'postgres://postgres:postgres@127.0.0.1/postgres'
const mapper = await connect({
connectionString: pgConnectionString,
log: logger,
})
const res = await mapper.entities.page.find({
fields: ['id', 'title',],
where: {
id: {
lt: 10
}
},
})
logger.info(res)
await mapper.db.dispose()
}
main()

count

Same as find, but only count entities.

Options

NameTypeDescription
whereObjectWhere clause 🔗

Usage

'use strict'

const { connect } = require('@platformatic/sql-mapper')
const { pino } = require('pino')
const pretty = require('pino-pretty')
const logger = pino(pretty())

async function main() {
const pgConnectionString = 'postgres://postgres:postgres@127.0.0.1/postgres'
const mapper = await connect({
connectionString: pgConnectionString,
log: logger,
})
const res = await mapper.entities.page.count({
where: {
id: {
lt: 10
}
},
})
logger.info(res)
await mapper.db.dispose()
}
main()

insert

Insert one or more entity rows in the database.

Options

NameTypeDescription
fieldsArray of stringList of fields to be returned for each object
inputsArray of ObjectEach object is a new row

Usage

'use strict'

const { connect } = require('@platformatic/sql-mapper')
const { pino } = require('pino')
const pretty = require('pino-pretty')
const logger = pino(pretty())

async function main() {
const pgConnectionString = 'postgres://postgres:postgres@127.0.0.1/postgres'
const mapper = await connect({
connectionString: pgConnectionString,
log: logger,
})
const res = await mapper.entities.page.insert({
fields: ['id', 'title' ],
inputs: [
{ title: 'Foobar' },
{ title: 'FizzBuzz' }
],
})
logger.info(res)
/**
0: {
"id": "16",
"title": "Foobar"
}
1: {
"id": "17",
"title": "FizzBuzz"
}
*/
await mapper.db.dispose()
}
main()

save

Create a new entity row in the database or update an existing one.

To update an existing entity, the id field (or equivalent primary key) must be included in the input object. +save actually behaves as an upsert, allowing both behaviours depending on the presence of the primary key field.

Options

NameTypeDescription
fieldsArray of stringList of fields to be returned for each object
inputObjectThe single row to create/update

Usage

'use strict'
const { connect } = require('@platformatic/sql-mapper')
const { pino } = require('pino')
const pretty = require('pino-pretty')
const logger = pino(pretty())

async function main() {
const connectionString = 'postgres://postgres:postgres@127.0.0.1/postgres'
const mapper = await connect({
connectionString: connectionString,
log: logger,
})
const res = await mapper.entities.page.save({
fields: ['id', 'title' ],
input: { id: 1, title: 'FizzBuzz' },
})
logger.info(res)
await mapper.db.dispose()
}
main()

delete

Delete one or more entity rows from the database, depending on the where option. Returns the data for all deleted objects.

Options

NameTypeDescription
fieldsArray of stringList of fields to be returned for each object
whereObjectWhere clause 🔗

Usage

'use strict'
const { connect } = require('@platformatic/sql-mapper')
const { pino } = require('pino')
const pretty = require('pino-pretty')
const logger = pino(pretty())

async function main() {
const connectionString = 'postgres://postgres:postgres@127.0.0.1/postgres'
const mapper = await connect({
connectionString: connectionString,
log: logger,
})
const res = await mapper.entities.page.delete({
fields: ['id', 'title',],
where: {
id: {
lt: 4
}
},
})
logger.info(res)
await mapper.db.dispose()
}
main()

updateMany

Update one or more entity rows from the database, depending on the where option. Returns the data for all updated objects.

Options

NameTypeDescription
whereObjectWhere clause 🔗
inputObjectThe new values you want to update
fieldsArray of stringList of fields to be returned for each object

Usage

'use strict'
const { connect } = require('@platformatic/sql-mapper')
const { pino } = require('pino')
const pretty = require('pino-pretty')
const logger = pino(pretty())

async function main() {
const connectionString = 'postgres://postgres:postgres@127.0.0.1/postgres'
const mapper = await connect({
connectionString: connectionString,
log: logger,
})
const res = await mapper.entities.page.updateMany({
fields: ['id', 'title',],
where: {
counter: {
gte: 30
}
},
input: {
title: 'Updated title'
}
})
logger.info(res)
await mapper.db.dispose()
}
main()

+ + + + \ No newline at end of file diff --git a/docs/0.41.3/reference/sql-mapper/entities/example/index.html b/docs/0.41.3/reference/sql-mapper/entities/example/index.html new file mode 100644 index 00000000000..5926126f1fa --- /dev/null +++ b/docs/0.41.3/reference/sql-mapper/entities/example/index.html @@ -0,0 +1,17 @@ + + + + + +Example | Platformatic Open Source Software + + + + + +
+
Version: 0.41.3

Example

Given this PostgreSQL SQL schema:

CREATE TABLE "categories" (
"id" int4 NOT NULL DEFAULT nextval('categories_id_seq'::regclass),
"name" varchar(255) NOT NULL,
PRIMARY KEY ("id")
);

CREATE TABLE "pages" (
"id" int4 NOT NULL DEFAULT nextval('pages_id_seq'::regclass),
"title" varchar(255) NOT NULL,
"category_id" int4,
"user_id" int4,
PRIMARY KEY ("id")
);

ALTER TABLE "pages" ADD FOREIGN KEY ("category_id") REFERENCES "categories"("id");

app.platformatic.entities will contain this mapping object:

{
"category": {
"name": "Category",
"singularName": "category",
"pluralName": "categories",
"primaryKey": "id",
"table": "categories",
"fields": {
"id": {
"sqlType": "int4",
"isNullable": false,
"primaryKey": true,
"camelcase": "id"
},
"name": {
"sqlType": "varchar",
"isNullable": false,
"camelcase": "name"
}
},
"camelCasedFields": {
"id": {
"sqlType": "int4",
"isNullable": false,
"primaryKey": true,
"camelcase": "id"
},
"name": {
"sqlType": "varchar",
"isNullable": false,
"camelcase": "name"
}
},
"relations": [],
"reverseRelationships": [
{
"sourceEntity": "Page",
"relation": {
"constraint_catalog": "postgres",
"constraint_schema": "public",
"constraint_name": "pages_category_id_fkey",
"table_catalog": "postgres",
"table_schema": "public",
"table_name": "pages",
"constraint_type": "FOREIGN KEY",
"is_deferrable": "NO",
"initially_deferred": "NO",
"enforced": "YES",
"column_name": "category_id",
"ordinal_position": 1,
"position_in_unique_constraint": 1,
"foreign_table_name": "categories",
"foreign_column_name": "id"
}
}
]
},
"page": {
"name": "Page",
"singularName": "page",
"pluralName": "pages",
"primaryKey": "id",
"table": "pages",
"fields": {
"id": {
"sqlType": "int4",
"isNullable": false,
"primaryKey": true,
"camelcase": "id"
},
"title": {
"sqlType": "varchar",
"isNullable": false,
"camelcase": "title"
},
"category_id": {
"sqlType": "int4",
"isNullable": true,
"foreignKey": true,
"camelcase": "categoryId"
},
"user_id": {
"sqlType": "int4",
"isNullable": true,
"camelcase": "userId"
}
},
"camelCasedFields": {
"id": {
"sqlType": "int4",
"isNullable": false,
"primaryKey": true,
"camelcase": "id"
},
"title": {
"sqlType": "varchar",
"isNullable": false,
"camelcase": "title"
},
"categoryId": {
"sqlType": "int4",
"isNullable": true,
"foreignKey": true,
"camelcase": "categoryId"
},
"userId": {
"sqlType": "int4",
"isNullable": true,
"camelcase": "userId"
}
},
"relations": [
{
"constraint_catalog": "postgres",
"constraint_schema": "public",
"constraint_name": "pages_category_id_fkey",
"table_catalog": "postgres",
"table_schema": "public",
"table_name": "pages",
"constraint_type": "FOREIGN KEY",
"is_deferrable": "NO",
"initially_deferred": "NO",
"enforced": "YES",
"column_name": "category_id",
"ordinal_position": 1,
"position_in_unique_constraint": 1,
"foreign_table_name": "categories",
"foreign_column_name": "id"
}
],
"reverseRelationships": []
}
}
+ + + + \ No newline at end of file diff --git a/docs/0.41.3/reference/sql-mapper/entities/fields/index.html b/docs/0.41.3/reference/sql-mapper/entities/fields/index.html new file mode 100644 index 00000000000..e6dbff84d2e --- /dev/null +++ b/docs/0.41.3/reference/sql-mapper/entities/fields/index.html @@ -0,0 +1,17 @@ + + + + + +Fields | Platformatic Open Source Software + + + + + +
+
Version: 0.41.3

Fields

When Platformatic DB inspects a database's schema, it creates an object for each table that contains a mapping of their fields.

These objects contain the following properties:

  • singularName: singular entity name, based on table name. Uses inflected under the hood.
  • pluralName: plural entity name (i.e 'pages')
  • primaryKey: the field which is identified as primary key.
  • table: original table name
  • fields: an object containing all fields details. Object key is the field name.
  • camelCasedFields: an object containing all fields details in camelcase. If you have a column named user_id you can access it using both userId or user_id

Fields detail

  • sqlType: The original field type. It may vary depending on the underlying DB Engine
  • isNullable: Whether the field can be null or not
  • primaryKey: Whether the field is the primary key or not
  • camelcase: The camelcased value of the field

Example

Given this SQL Schema (for PostgreSQL):

CREATE SEQUENCE IF NOT EXISTS pages_id_seq;
CREATE TABLE "public"."pages" (
"id" int4 NOT NULL DEFAULT nextval('pages_id_seq'::regclass),
"title" varchar,
"body_content" text,
"category_id" int4,
PRIMARY KEY ("id")
);

The resulting mapping object will be:

{
singularName: 'page',
pluralName: 'pages',
primaryKey: 'id',
table: 'pages',
fields: {
id: {
sqlType: 'int4',
isNullable: false,
primaryKey: true,
camelcase: 'id'
},
title: {
sqlType: 'varchar',
isNullable: true,
camelcase: 'title'
},
body_content: {
sqlType: 'text',
isNullable: true,
camelcase: 'bodyContent'
},
category_id: {
sqlType: 'int4',
isNullable: true,
foreignKey: true,
camelcase: 'categoryId'
}
}
camelCasedFields: {
id: {
sqlType: 'int4',
isNullable: false,
primaryKey: true,
camelcase: 'id'
},
title: {
sqlType: 'varchar',
isNullable: true,
camelcase: 'title'
},
bodyContent: {
sqlType: 'text',
isNullable: true,
camelcase: 'bodyContent'
},
categoryId: {
sqlType: 'int4',
isNullable: true,
foreignKey: true,
camelcase: 'categoryId'
}
},
relations: []
}
+ + + + \ No newline at end of file diff --git a/docs/0.41.3/reference/sql-mapper/entities/hooks/index.html b/docs/0.41.3/reference/sql-mapper/entities/hooks/index.html new file mode 100644 index 00000000000..a0d74523658 --- /dev/null +++ b/docs/0.41.3/reference/sql-mapper/entities/hooks/index.html @@ -0,0 +1,17 @@ + + + + + +Hooks | Platformatic Open Source Software + + + + + +
+
Version: 0.41.3

Hooks

Entity hooks are a way to wrap the API methods for an entity and add custom behaviour.

The Platformatic DB SQL Mapper provides an addEntityHooks(entityName, spec) function that can be used to add hooks for an entity.

How to use hooks

addEntityHooks accepts two arguments:

  1. A string representing the entity name (singularized), for example 'page'.
  2. A key/value object where the key is one of the API methods (find, insert, save, delete) and the value is a callback function. The callback will be called with the original API method and the options that were passed to that method. See the example below.

Usage

'use strict'
const { connect } = require('@platformatic/sql-mapper')
const { pino } = require('pino')
const pretty = require('pino-pretty')
const logger = pino(pretty())

async function main() {
const pgConnectionString = 'postgres://postgres:postgres@127.0.0.1/postgres'
const mapper = await connect({
connectionString: pgConnectionString,
log: logger,
})
mapper.addEntityHooks('page', {
find: async (originalFind, opts) => {
// Add a `foo` field with `bar` value to each row
const res = await originalFind(opts)
return res.map((row) => {
row.foo = 'bar'
return row
})
}
})
const res = await mapper.entities.page.find({
fields: ['id', 'title',],
where: {
id: {
lt: 10
}
},
})
logger.info(res)
/**
[
0: {
"id": "5",
"title": "Page 1",
"foo": "bar"
},
1: {
"id": "6",
"title": "Page 2",
"foo": "bar"
}
]
*/
await mapper.db.dispose()
}
main()

Multiple Hooks

Multiple hooks can be added for the same entity and API method, for example:

'use strict'
const { connect } = require('@platformatic/sql-mapper')
const { pino } = require('pino')
const pretty = require('pino-pretty')
const logger = pino(pretty())

async function main() {
const pgConnectionString = 'postgres://postgres:postgres@127.0.0.1/postgres'
const mapper = await connect({
connectionString: pgConnectionString,
log: logger,
})
mapper.addEntityHooks('page', {
find: async function firstHook(previousFunction, opts) {
// Add a `foo` field with `bar` value to each row
const res = await previousFunction(opts)
return res.map((row) => {
row.foo = 'bar'
return row
})
}
})
mapper.addEntityHooks('page', {
find: async function secondHook(previousFunction, opts) {
// Add a `bar` field with `baz` value to each row
const res = await previousFunction(opts)
return res.map((row) => {
row.bar = 'baz'
return row
})
}
})
const res = await mapper.entities.page.find({
fields: ['id', 'title',],
where: {
id: {
lt: 10
}
},
})
logger.info(res)
/**
[
0: {
"id": "5",
"title": "Page 1",
"foo": "bar",
"bar": "baz"
},
1: {
"id": "6",
"title": "Page 2",
"foo": "bar",
"bar": "baz"
}
]
*/
await mapper.db.dispose()
}
main()

Since hooks are wrappers, they are being called in reverse order, like the image below

Hooks Lifecycle

So even though we defined two hooks, the Database will be hit only once.

Query result will be processed by firstHook, which will pass the result to secondHook, which will, finally, send the processed result to the original .find({...}) function.

+ + + + \ No newline at end of file diff --git a/docs/0.41.3/reference/sql-mapper/entities/introduction/index.html b/docs/0.41.3/reference/sql-mapper/entities/introduction/index.html new file mode 100644 index 00000000000..9e46052f1e2 --- /dev/null +++ b/docs/0.41.3/reference/sql-mapper/entities/introduction/index.html @@ -0,0 +1,17 @@ + + + + + +Introduction to Entities | Platformatic Open Source Software + + + + + +
+
Version: 0.41.3

Introduction to Entities

The primary goal of Platformatic DB is to read a database schema and generate REST and GraphQL endpoints that enable the execution of CRUD (Create/Retrieve/Update/Delete) operations against the database.

Platformatic DB includes a mapper that reads the schemas of database tables and then generates an entity object for each table.

Platformatic DB is a Fastify application. The Fastify instance object is decorated with the platformatic property, which exposes several APIs that handle the manipulation of data in the database.

Platformatic DB populates the app.platformatic.entities object with data found in database tables.

The keys on the entities object are singularized versions of the table names — for example users becomes user, categories becomes category — and the values are a set of associated metadata and functions.

+ + + + \ No newline at end of file diff --git a/docs/0.41.3/reference/sql-mapper/entities/relations/index.html b/docs/0.41.3/reference/sql-mapper/entities/relations/index.html new file mode 100644 index 00000000000..ce5d815b090 --- /dev/null +++ b/docs/0.41.3/reference/sql-mapper/entities/relations/index.html @@ -0,0 +1,20 @@ + + + + + +Relations | Platformatic Open Source Software + + + + + +
+
Version: 0.41.3

Relations

When Platformatic DB is reading your database schema, it identifies relationships +between tables and stores metadata on them in the entity object's relations field. +This is achieved by querying the database's internal metadata.

Example

Given this PostgreSQL schema:

CREATE SEQUENCE IF NOT EXISTS categories_id_seq;

CREATE TABLE "categories" (
"id" int4 NOT NULL DEFAULT nextval('categories_id_seq'::regclass),
"name" varchar(255) NOT NULL,
PRIMARY KEY ("id")
);

CREATE SEQUENCE IF NOT EXISTS pages_id_seq;

CREATE TABLE "pages" (
"id" int4 NOT NULL DEFAULT nextval('pages_id_seq'::regclass),
"title" varchar(255) NOT NULL,
"body_content" text,
"category_id" int4,
PRIMARY KEY ("id")
);

ALTER TABLE "pages" ADD FOREIGN KEY ("category_id") REFERENCES "categories"("id");

When this code is run:

'use strict'
const { connect } = require('@platformatic/sql-mapper')
const { pino } = require('pino')
const pretty = require('pino-pretty')
const logger = pino(pretty())

async function main() {
const pgConnectionString = 'postgres://postgres:postgres@127.0.0.1/postgres'
const mapper = await connect({
connectionString: pgConnectionString,
log: logger,
})
const pageEntity = mapper.entities.page
console.log(pageEntity.relations)
await mapper.db.dispose()
}
main()

The output will be:

[
{
constraint_catalog: 'postgres',
constraint_schema: 'public',
constraint_name: 'pages_category_id_fkey',
table_catalog: 'postgres',
table_schema: 'public',
table_name: 'pages',
constraint_type: 'FOREIGN KEY',
is_deferrable: 'NO',
initially_deferred: 'NO',
enforced: 'YES',
column_name: 'category_id',
ordinal_position: 1,
position_in_unique_constraint: 1,
foreign_table_name: 'categories',
foreign_column_name: 'id'
}
]

As Platformatic DB supports multiple database engines, the contents of the +relations object will vary depending on the database being used.

The following relations fields are common to all database engines:

  • column_name — the column that stores the foreign key
  • foreign_table_name — the table hosting the related row
  • foreign_column_name — the column in foreign table that identifies the row
+ + + + \ No newline at end of file diff --git a/docs/0.41.3/reference/sql-mapper/entities/timestamps/index.html b/docs/0.41.3/reference/sql-mapper/entities/timestamps/index.html new file mode 100644 index 00000000000..c73cd742d01 --- /dev/null +++ b/docs/0.41.3/reference/sql-mapper/entities/timestamps/index.html @@ -0,0 +1,17 @@ + + + + + +Timestamps | Platformatic Open Source Software + + + + + +
+
Version: 0.41.3

Timestamps

Timestamps can be used to automatically set the created_at and updated_at fields on your entities.

Timestamps are enabled by default

Configuration

To disable timestamps, you need to set the autoTimestamp field to false in configuration file:

{
...
"db": {
"connectionString": "postgres://postgres:postgres@127.0.0.1/postgres",
"autoTimestamp": false
},
...
}

Customizing the field names

By default, the created_at and updated_at fields are used. You can customize the field names by setting the createdAt and updatedAt options in autoTimestamp field in configuration file:

{
...
"db": {
"connectionString": "postgres://postgres:postgres@127.0.0.1/postgres",
"autoTimestamp": {
"createdAt": "inserted_at",
"updatedAt": "updated_at"
}
...
}
+ + + + \ No newline at end of file diff --git a/docs/0.41.3/reference/sql-mapper/entities/transactions/index.html b/docs/0.41.3/reference/sql-mapper/entities/transactions/index.html new file mode 100644 index 00000000000..56ba6ae43c1 --- /dev/null +++ b/docs/0.41.3/reference/sql-mapper/entities/transactions/index.html @@ -0,0 +1,18 @@ + + + + + +Transactions | Platformatic Open Source Software + + + + + +
+
Version: 0.41.3

Transactions

Platformatic DB entities support transactions through the optional tx parameter. +If the tx parameter is provided, the entity will join the transaction, e.g.:


const { connect } = require('@platformatic/sql-mapper')
const logger = pino(pretty())

async function main() {
const pgConnectionString = 'postgres://postgres:postgres@127.0.0.1/postgres'
const { db, entities} = await connect({
connectionString: pgConnectionString,
log: logger,
})

const result = await db.tx(async tx => {
// these two operations will be executed in the same transaction
const authorResult = await entities.author.save({
fields: ['id', 'name'],
input: { name: 'test'},
tx
})
const res = await entities.page.save({
fields: ['title', 'authorId'],
input: { title: 'page title', authorId: authorResult.id },
tx
})
return res
})

}

Throwing an Error triggers a transaction rollback:

    try {
await db.tx(async tx => {
await entities.page.save({
input: { title: 'new page' },
fields: ['title'],
tx
})

// here we have `new page`
const findResult = await entities.page.find({ fields: ['title'], tx })

// (...)

// We force the rollback
throw new Error('rollback')
})
} catch (e) {
// rollback
}

// no 'new page' here...
const afterRollback = await entities.page.find({ fields: ['title'] })

+ + + + \ No newline at end of file diff --git a/docs/0.41.3/reference/sql-mapper/fastify-plugin/index.html b/docs/0.41.3/reference/sql-mapper/fastify-plugin/index.html new file mode 100644 index 00000000000..39a82b04567 --- /dev/null +++ b/docs/0.41.3/reference/sql-mapper/fastify-plugin/index.html @@ -0,0 +1,17 @@ + + + + + +sql-mapper Fastify Plugin | Platformatic Open Source Software + + + + + +
+
Version: 0.41.3

sql-mapper Fastify Plugin

The @platformatic/sql-mapper package exports a Fastify plugin that can be used out-of the box in a server application.

A connectionString option must be passed to connect to your database.

The plugin decorates the server with a platformatic object that has the following properties:

  • db — the DB wrapper object provided by @databases
  • sql — the SQL query mapper object provided by @databases
  • entities — all entity objects with their API methods
  • addEntityHooks — a function to add a hook to an entity API method.

The plugin also decorates the Fastify Request object with the following:

  • platformaticContext: an object with the following two properties:
    • app, the Fastify application of the given route
    • reply, the Fastify Reply instance matching that request

Usage

'use strict'

const Fastify = require('fastify')
const mapper = require('@platformatic/sql-mapper')

async function main() {
const app = Fastify({
logger: {
level: 'info'
}
})
app.register(mapper.plugin, {
connectionString: 'postgres://postgres:postgres@127.0.0.1/postgres'
})

app.get('/all-pages', async (req, reply) => {
// Optionally get the platformatic context.
// Passing this to all sql-mapper functions allow to apply
// authorization rules to the database queries (amongst other things).
const ctx = req.platformaticContext

// Will return all rows from 'pages' table
const res = await app.platformatic.entities.page.find({ ctx })
return res
})

await app.listen({ port: 3333 })
}

main()
+ + + + \ No newline at end of file diff --git a/docs/0.41.3/reference/sql-mapper/introduction/index.html b/docs/0.41.3/reference/sql-mapper/introduction/index.html new file mode 100644 index 00000000000..8fb662e90af --- /dev/null +++ b/docs/0.41.3/reference/sql-mapper/introduction/index.html @@ -0,0 +1,19 @@ + + + + + +Introduction to @platformatic/sql-mapper | Platformatic Open Source Software + + + + + +
+
Version: 0.41.3

Introduction to @platformatic/sql-mapper

@platformatic/sql-mapper is the underlying utility that Platformatic DB uses to create useful utilities to +manipulate your SQL database using JavaScript.

This module is bundled with Platformatic DB via a fastify plugin +The rest of this guide shows how to use this module directly.

Install

npm i @platformatic/sql-mapper

API

connect(opts) : Promise

It will inspect a database schema and return an object containing:

  • db — A database abstraction layer from @databases
  • sql — The SQL builder from @databases
  • entities — An object containing a key for each table found in the schema, with basic CRUD operations. See Entity Reference for details.

The valid options are:

  • connectionString — The Database connection string
  • poolSize - Maximum number of connections in the connection pool. Defaults to 10.
  • log — A logger object (like Pino)
  • onDatabaseLoad — An async function that is called after the connection is established. It will receive db and sql as parameters.
  • ignore — Object used to ignore some tables from building entities. (i.e. { 'versions': true } will ignore versions table)
  • autoTimestamp — Generate timestamp automatically when inserting/updating records.
  • hooks — For each entity name (like Page) you can customize any of the entity API function. Your custom function will receive the original function as first parameter, and then all the other parameters passed to it.

createConnectionPool(opts) : Promise

It will connect to the database and return an object containing db and sql (no entities are generated):

The valid options are:

  • connectionString — The Database connection string
  • poolSize - Maximum number of connections in the connection pool. Defaults to 10.
  • log — A logger object (like Pino)

This utility is useful if you just need to connect to the db without generating any entity.

Code samples

const { connect } = require('@platformatic/sql-mapper')
const { pino } = require('pino')

const logger = pino()

async function onDatabaseLoad (db, sql) {
await db.query(sql`CREATE TABLE pages (
id SERIAL PRIMARY KEY,
title VARCHAR(255) NOT NULL
);`)
}
const connectionString =
'postgres://postgres:postgres@localhost:5432/postgres'
const mapper = await connect({
connectionString,
log: logger,
onDatabaseLoad,
ignore: {},
hooks: {
Page: {
find: async function(_find, opts) {
console.log('hook called');
return await _find(opts)
}
}
}
})
const pageEntity = mapper.entities.page

await mapper.db.query(mapper.sql`SELECT * FROM pages`)
await mapper.db.find('option1', 'option2')
+ + + + \ No newline at end of file diff --git a/docs/0.41.3/reference/sql-openapi/api/index.html b/docs/0.41.3/reference/sql-openapi/api/index.html new file mode 100644 index 00000000000..227506e6047 --- /dev/null +++ b/docs/0.41.3/reference/sql-openapi/api/index.html @@ -0,0 +1,22 @@ + + + + + +API | Platformatic Open Source Software + + + + + +
+
Version: 0.41.3

API

Each table is mapped to an entity named after the table's name.

In the following reference we'll use some placeholders, but let's see an example

Example

Given this SQL executed against your database:

CREATE TABLE pages (
id SERIAL PRIMARY KEY,
title VARCHAR(255) NOT NULL,
body TEXT NOT NULL
);
  • [PLURAL_ENTITY_NAME] is pages
  • [SINGULAR_ENTITY_NAME] is page
  • [PRIMARY_KEY] is id
  • fields are id, title, body

GET and POST parameters

Some APIs need the GET method, where parameters must be defined in the URL, or POST/PUT methods, where parameters can be defined in the HTTP request payload.

Fields

Every API can define a fields parameter, representing the entity fields you want to get back for each row of the table. If not specified all fields are returned.

The fields parameter is always sent in the query string, even for POST, PUT and DELETE requests, as a comma separated value.

GET /[PLURAL_ENTITY_NAME]

Return all entities matching where clause

Where clause

You can define many WHERE clauses in REST API, each clause includes a field, an operator and a value.

The field is one of the fields found in the schema.

The operator follows this table:

Platformatic operatorSQL operator
eq'='
in'IN'
nin'NOT IN'
neq'<>'
gt'>'
gte'>='
lt'<'
lte'<='

The value is the value you want to compare the field to.

For GET requests all these clauses are specified in the query string using the format where.[FIELD].[OPERATOR]=[VALUE]

Example

If you want to get the title and the body of every page where id < 15 you can make an HTTP request like this:

$ curl -X 'GET' \
'http://localhost:3042/pages/?fields=body,title&where.id.lt=15' \
-H 'accept: application/json'

Where clause operations are by default combined with the AND operator. To create an OR condition use the where.or query param.

Each where.or query param can contain multiple conditions separated by a | (pipe).

The where.or conditions are similar to the where conditions, except that they don't have the where prefix.

Example

If you want to get the posts where counter = 10 OR counter > 30 you can make an HTTP request like this:

$ curl -X 'GET' \
'http://localhost:3042/pages/?where.or=(counter.eq=10|counter.gte=30)' \
-H 'accept: application/json'

OrderBy clause

You can define the ordering of the returned rows within your REST API calls with the orderby clause using the following pattern:

?orderby.[field]=[asc | desc]

The field is one of the fields found in the schema. The value can be asc or desc.

Example

If you want to get the pages ordered alphabetically by their titles you can make an HTTP request like this:

$ curl -X 'GET' \
'http://localhost:3042/pages?orderby.title=asc' \
-H 'accept: application/json'

Total Count

If totalCount boolean is true in query, the GET returns the total number of elements in the X-Total-Count header ignoring limit and offset (if specified).

$ curl -v -X 'GET' \
'http://localhost:3042/pages/?limit=2&offset=0&totalCount=true' \
-H 'accept: application/json'

(...)
> HTTP/1.1 200 OK
> x-total-count: 18
(...)

[{"id":1,"title":"Movie1"},{"id":2,"title":"Movie2"}]%

POST [PLURAL_ENTITY_NAME]

Creates a new row in table. Expects fields to be sent in a JSON formatted request body.

Example

$ curl -X 'POST' \
'http://localhost:3042/pages/' \
-H 'accept: application/json' \
-H 'Content-Type: application/json' \
-d '{
"title": "Hello World",
"body": "Welcome to Platformatic!"
}'

{
"id": 1,
"title": "Hello World",
"body": "Welcome to Platformatic"
}

GET [PLURAL_ENTITY_NAME]/[PRIMARY_KEY]

Returns a single row, identified by PRIMARY_KEY.

Example

$ curl -X 'GET' 'http://localhost:3042/pages/1?fields=title,body'

{
"title": "Hello World",
"body": "Welcome to Platformatic"
}

POST [PLURAL_ENTITY_NAME]/[PRIMARY_KEY]

Updates a row identified by PRIMARY_KEY.

Example

$ curl -X 'POST' \
'http://localhost:3042/pages/1' \
-H 'accept: application/json' \
-H 'Content-Type: application/json' \
-d '{
"title": "Hello Platformatic!",
"body": "Welcome to Platformatic!"
}'

{
"id": 1,
"title": "Hello Platformatic!",
"body": "Welcome to Platformatic"
}

PUT [PLURAL_ENTITY_NAME]/[PRIMARY_KEY]

Same as POST [PLURAL_ENTITY_NAME]/[PRIMARY_KEY].

PUT [PLURAL_ENTITY_NAME]

Updates all entities matching where clause

Example

$ curl -X 'PUT' \
'http://localhost:3042/pages?where.id.in=1,2' \
-H 'accept: application/json' \
-H 'Content-Type: application/json' \
-d '{
"title": "Updated title!",
"body": "Updated body!"
}'

[{
"id": 1,
"title": "Updated title!",
"body": "Updated body!"
},{
"id": 2,
"title": "Updated title!",
"body": "Updated body!"
}]

DELETE [PLURAL_ENTITY_NAME]/[PRIMARY_KEY]

Deletes a row identified by the PRIMARY_KEY.

Example

$ curl -X 'DELETE' 'http://localhost:3042/pages/1?fields=title'

{
"title": "Hello Platformatic!"
}

Nested Relationships

Let's consider the following SQL:

CREATE TABLE IF NOT EXISTS movies (
movie_id INTEGER PRIMARY KEY,
title TEXT NOT NULL
);
CREATE TABLE IF NOT EXISTS quotes (
id INTEGER PRIMARY KEY,
quote TEXT NOT NULL,
movie_id INTEGER NOT NULL REFERENCES movies(movie_id)
);

And:

  • [P_PARENT_ENTITY] is movies
  • [S_PARENT_ENTITY] is movie
  • [P_CHILDREN_ENTITY] is quotes
  • [S_CHILDREN_ENTITY] is quote

In this case, more APIs are available:

GET [P_PARENT_ENTITY]/[PARENT_PRIMARY_KEY]/[P_CHILDREN_ENTITY]

Given a 1-to-many relationship, where a parent entity can have many children, you can query for the children directly.

$ curl -X 'GET' 'http://localhost:3042/movies/1/quotes?fields=quote'

[
{
"quote": "I'll be back"
},
{
"quote": "Hasta la vista, baby"
}
]

GET [P_CHILDREN_ENTITY]/[CHILDREN_PRIMARY_KEY]/[S_PARENT_ENTITY]

You can query for the parent directly, e.g.:

$ curl -X 'GET' 'http://localhost:3042/quotes/1/movie?fields=title'

{
"title": "Terminator"
}

Many-to-Many Relationships

A many-to-many relationship lets you relate each row in one table to many rows in another table, and vice versa.

Many-to-many relationships are implemented in SQL via a "join table", a table whose primary key is composed of the identifiers of the two parts of the many-to-many relationship.

Platformatic DB fully supports many-to-many relationships on all supported databases.

Let's consider the following SQL:

CREATE TABLE pages (
id INTEGER PRIMARY KEY,
the_title VARCHAR(42)
);

CREATE TABLE users (
id INTEGER PRIMARY KEY,
username VARCHAR(255) NOT NULL
);

CREATE TABLE editors (
page_id INTEGER NOT NULL,
user_id INTEGER NOT NULL,
role VARCHAR(255) NOT NULL,
CONSTRAINT fk_editor_pages FOREIGN KEY (page_id) REFERENCES pages(id),
CONSTRAINT fk_editor_users FOREIGN KEY (user_id) REFERENCES users(id),
PRIMARY KEY (page_id, user_id)
);

And:

  • [P_ENTITY] is editors
  • [P_REL_1] is pages
  • [S_REL_1] is page
  • [KEY_REL_1] is pages PRIMARY KEY: pages(id)
  • [P_REL_2] is users
  • [S_REL_2] is user
  • [KEY_REL_2] is users PRIMARY KEY: users(id)

In this case, here are the APIs that are available for the join table:

GET [P_ENTITY]/[S_REL_1]/[KEY_REL_1]/[S_REL_2]/[KEY_REL_2]

This returns the entity in the "join table", e.g. GET /editors/page/1/user/1.

POST [P_ENTITY]/[S_REL_1]/[KEY_REL_1]/[S_REL_2]/[KEY_REL_2]

Creates a new entity in the "join table", e.g. POST /editors/page/1/user/1.

PUT [P_ENTITY]/[S_REL_1]/[KEY_REL_1]/[S_REL_2]/[KEY_REL_2]

Updates an entity in the "join table", e.g. PUT /editors/page/1/user/1.

DELETE [P_ENTITY]/[S_REL_1]/[KEY_REL_1]/[S_REL_2]/[KEY_REL_2]

Delete the entity in the "join table", e.g. DELETE /editors/page/1/user/1.

GET /[P_ENTITY]

See the above.

Offset only accepts values >= 0. Otherwise an error is returned.

Pagination

Platformatic DB supports results pagination through the input parameters limit and offset

Example

$ curl -X 'GET' 'http://localhost:3042/movies?limit=5&offset=10'

[
{
"title": "Star Wars",
"movie_id": 10
},
...
{
"title": "007",
"movie_id": 14
}
]

It returns 5 movies starting from position 10.

TotalCount functionality can be used in order to evaluate if there are more pages.

Limit

By default a limit value (10) is applied to each request.

Clients can override this behavior by passing a value. In this case the server validates the input, and an error is returned if it exceeds the maximum accepted value (100).

Limit's values can be customized through configuration:

{
...
"db": {
...
"limit": {
"default": 50,
"max": 1000
}
}
}

Limit only accepts values >= 0. Otherwise an error is returned.

Offset

By default offset is not applied to the request. Clients can override this behavior by passing a value.

Offset only accepts values >= 0. Otherwise an error is returned.

+ + + + \ No newline at end of file diff --git a/docs/0.41.3/reference/sql-openapi/ignore/index.html b/docs/0.41.3/reference/sql-openapi/ignore/index.html new file mode 100644 index 00000000000..e970686cbac --- /dev/null +++ b/docs/0.41.3/reference/sql-openapi/ignore/index.html @@ -0,0 +1,17 @@ + + + + + +Ignoring entities and fields | Platformatic Open Source Software + + + + + +
+
Version: 0.41.3

Ignoring entities and fields

@platformatic/sql-openapi allows you to selectively ignore entities and fields.

To ignore entities:

app.register(require('@platformatic/sql-openapi'), {
ignore: {
categories: true
}
})

To ignore individual fields:

app.register(require('@platformatic/sql-openapi'), {
ignore: {
categories: {
name: true
}
}
})
+ + + + \ No newline at end of file diff --git a/docs/0.41.3/reference/sql-openapi/introduction/index.html b/docs/0.41.3/reference/sql-openapi/introduction/index.html new file mode 100644 index 00000000000..e8e00e9f874 --- /dev/null +++ b/docs/0.41.3/reference/sql-openapi/introduction/index.html @@ -0,0 +1,17 @@ + + + + + +Introduction to the REST API | Platformatic Open Source Software + + + + + +
+
Version: 0.41.3

Introduction to the REST API

The Platformatic DB OpenAPI plugin automatically starts a REST API server (powered by Fastify) that provides CRUD (Create, Read, Update, Delete) functionality for each entity.

Configuration

In the config file, under the "db" section, the OpenAPI server is enabled by default. You can disable it by setting the property openapi to false.

Example

{
...
"db": {
"openapi": false
}
}

As Platformatic DB uses fastify-swagger under the hood, the "openapi" property can be an object that follows the OpenAPI Specification Object format.

This allows you to extend the output of the Swagger UI documentation.

+ + + + \ No newline at end of file diff --git a/docs/0.42.0/category/getting-started/index.html b/docs/0.42.0/category/getting-started/index.html new file mode 100644 index 00000000000..14468b3a41c --- /dev/null +++ b/docs/0.42.0/category/getting-started/index.html @@ -0,0 +1,17 @@ + + + + + +Getting Started | Platformatic Open Source Software + + + + + + + + + + \ No newline at end of file diff --git a/docs/0.42.0/category/guides/index.html b/docs/0.42.0/category/guides/index.html new file mode 100644 index 00000000000..e28dbeb7d64 --- /dev/null +++ b/docs/0.42.0/category/guides/index.html @@ -0,0 +1,17 @@ + + + + + +Guides | Platformatic Open Source Software + + + + + +
+
Version: 0.42.0

Guides

+ + + + \ No newline at end of file diff --git a/docs/0.42.0/category/packages/index.html b/docs/0.42.0/category/packages/index.html new file mode 100644 index 00000000000..78aeb85c6b5 --- /dev/null +++ b/docs/0.42.0/category/packages/index.html @@ -0,0 +1,17 @@ + + + + + +Packages | Platformatic Open Source Software + + + + + + + + + + \ No newline at end of file diff --git a/docs/0.42.0/category/platformatic-cloud/index.html b/docs/0.42.0/category/platformatic-cloud/index.html new file mode 100644 index 00000000000..e9a4c3bb1a3 --- /dev/null +++ b/docs/0.42.0/category/platformatic-cloud/index.html @@ -0,0 +1,17 @@ + + + + + +Platformatic Cloud | Platformatic Open Source Software + + + + + + + + + + \ No newline at end of file diff --git a/docs/0.42.0/category/reference/index.html b/docs/0.42.0/category/reference/index.html new file mode 100644 index 00000000000..777cd2ef3e1 --- /dev/null +++ b/docs/0.42.0/category/reference/index.html @@ -0,0 +1,17 @@ + + + + + +Reference | Platformatic Open Source Software + + + + + + + + + + \ No newline at end of file diff --git a/docs/0.42.0/contributing/documentation-style-guide/index.html b/docs/0.42.0/contributing/documentation-style-guide/index.html new file mode 100644 index 00000000000..c8c33483a89 --- /dev/null +++ b/docs/0.42.0/contributing/documentation-style-guide/index.html @@ -0,0 +1,74 @@ + + + + + +Documentation Style Guide | Platformatic Open Source Software + + + + + +
+
Version: 0.42.0

Documentation Style Guide

Welcome to the Platformatic Documentation Style Guide. This guide is here to provide you with a conventional writing style for users writing developer documentation on our Open Source framework. Each topic is precise and well explained to help you write documentation users can easily understand and implement.

Who is This Guide For?

This guide is for anyone who loves to build with Platformatic or wants to contribute +to our documentation. You do not need to be an expert in writing technical +documentation. This guide is here to help you.

Visit CONTRIBUTING.md +file on GitHub to join our Open Source folks.

Before you Write

You should have a basic understanding of:

  • JavaScript
  • Node.js
  • Git
  • GitHub
  • Markdown
  • HTTP
  • NPM

Consider Your Audience

Before you start writing, think about your audience. In this case, your audience +should already know HTTP, JavaScript, NPM, and Node.js. It is necessary to keep +your readers in mind because they are the ones consuming your content. You want +to give as much useful information as possible. Consider the vital things they +need to know and how they can understand them. Use words and references that +readers can relate to easily. Ask for feedback from the community, it can help +you write better documentation that focuses on the user and what you want to +achieve.

Get Straight to the Point

Give your readers a clear and precise action to take. Start with what is most +important. This way, you can help them find what they need faster. Mostly, +readers tend to read the first content on a page, and many will not scroll +further.

Example

Less like this:

Colons are very important to register a parametric path. It lets +the framework know there is a new parameter created. You can place the colon +before the parameter name so the parametric path can be created.

More Like this:

To register a parametric path, put a colon before the parameter +name. Using a colon lets the framework know it is a parametric path and not a +static path.

Images and Video Should Enhance the Written Documentation

Images and video should only be added if they complement the written +documentation, for example to help the reader form a clearer mental model of a +concept or pattern.

Images can be directly embedded, but videos should be included by linking to an +external site, such as YouTube. You can add links by using +[Title](https://www.websitename.com) in the Markdown.

Avoid Plagiarism

Make sure you avoid copying other people's work. Keep it as original as +possible. You can learn from what they have done and reference where it is from +if you used a particular quote from their work.

Word Choice

There are a few things you need to use and avoid when writing your documentation +to improve readability for readers and make documentation neat, direct, and +clean.

When to use the Second Person "you" as the Pronoun

When writing articles or guides, your content should communicate directly to +readers in the second person ("you") addressed form. It is easier to give them +direct instruction on what to do on a particular topic. To see an example, visit +the Quick Start Guide.

Example

Less like this:

We can use the following plugins.

More like this:

You can use the following plugins.

According to Wikipedia, You is usually a second person pronoun. +Also, used to refer to an indeterminate person, as a more common alternative +to a very formal indefinite pronoun.

To recap, use "you" when writing articles or guides.

When to Avoid the Second Person "you" as the Pronoun

One of the main rules of formal writing such as reference documentation, or API +documentation, is to avoid the second person ("you") or directly addressing the +reader.

Example

Less like this:

You can use the following recommendation as an example.

More like this:

As an example, the following recommendations should be +referenced.

To view a live example, refer to the Decorators +reference document.

To recap, avoid "you" in reference documentation or API documentation.

Avoid Using Contractions

Contractions are the shortened version of written and spoken forms of a word, +i.e. using "don't" instead of "do not". Avoid contractions to provide a more +formal tone.

Avoid Using Condescending Terms

Condescending terms are words that include:

  • Just
  • Easy
  • Simply
  • Basically
  • Obviously

The reader may not find it easy to use Platformatic; avoid +words that make it sound simple, easy, offensive, or insensitive. Not everyone +who reads the documentation has the same level of understanding.

Starting With a Verb

Mostly start your description with a verb, which makes it simple and precise for +the reader to follow. Prefer using present tense because it is easier to read +and understand than the past or future tense.

Example

Less like this:

There is a need for Node.js to be installed before you can be +able to use Platformatic.

More like this:

Install Node.js to make use of Platformatic.

Grammatical Moods

Grammatical moods are a great way to express your writing. Avoid sounding too +bossy while making a direct statement. Know when to switch between indicative, +imperative, and subjunctive moods.

Indicative - Use when making a factual statement or question.

Example

Since there is no testing framework available, "Platformatic recommends ways +to write tests".

Imperative - Use when giving instructions, actions, commands, or when you +write your headings.

Example

Install dependencies before starting development.

Subjunctive - Use when making suggestions, hypotheses, or non-factual +statements.

Example

Reading the documentation on our website is recommended to get +comprehensive knowledge of the framework.

Use Active Voice Instead of Passive

Using active voice is a more compact and direct way of conveying your +documentation.

Example

Passive:

The node dependencies and packages are installed by npm.

Active:

npm installs packages and node dependencies.

Writing Style

Documentation Titles

When creating a new guide, API, or reference in the /docs/ directory, use +short titles that best describe the topic of your documentation. Name your files +in kebab-cases and avoid Raw or camelCase. To learn more about kebab-case you +can visit this medium article on Case +Styles.

Examples:

hook-and-plugins.md

adding-test-plugins.md

removing-requests.md

Hyperlinks should have a clear title of what it references. Here is how your +hyperlink should look:

<!-- More like this -->

// Add clear & brief description
[Fastify Plugins] (https://www.fastify.io/docs/latest/Plugins/)

<!--Less like this -->

// incomplete description
[Fastify] (https://www.fastify.io/docs/latest/Plugins/)

// Adding title in link brackets
[](https://www.fastify.io/docs/latest/Plugins/ "fastify plugin")

// Empty title
[](https://www.fastify.io/docs/latest/Plugins/)

// Adding links localhost URLs instead of using code strings (``)
[http://localhost:3000/](http://localhost:3000/)

Include in your documentation as many essential references as possible, but +avoid having numerous links when writing to avoid distractions.

+ + + + \ No newline at end of file diff --git a/docs/0.42.0/contributing/index.html b/docs/0.42.0/contributing/index.html new file mode 100644 index 00000000000..56929273e1d --- /dev/null +++ b/docs/0.42.0/contributing/index.html @@ -0,0 +1,18 @@ + + + + + +Contributing | Platformatic Open Source Software + + + + + +
+
+ + + + \ No newline at end of file diff --git a/docs/0.42.0/getting-started/architecture/index.html b/docs/0.42.0/getting-started/architecture/index.html new file mode 100644 index 00000000000..496d78e0b1b --- /dev/null +++ b/docs/0.42.0/getting-started/architecture/index.html @@ -0,0 +1,25 @@ + + + + + +Architecture | Platformatic Open Source Software + + + + + +
+
Version: 0.42.0

Architecture

Platformatic is a collection of Open Source tools designed to eliminate friction +in backend development. The first of those tools is Platformatic DB, which is developed +as @platformatic/db.

Platformatic DB

Platformatic DB can expose a SQL database by dynamically mapping it to REST/OpenAPI +and GraphQL endpoints. It supports a limited subset of the SQL query language, but +also allows developers to add their own custom routes and resolvers.

Platformatic DB Architecture

Platformatic DB is composed of a few key libraries:

  1. @platformatic/sql-mapper - follows the Data Mapper pattern to build an API on top of a SQL database. Internally it uses the @databases project.
  2. @platformatic/sql-openapi - uses sql-mapper to create a series of REST routes and matching OpenAPI definitions. +Internally it uses @fastify/swagger.
  3. @platformatic/sql-graphql - uses sql-mapper to create a GraphQL endpoint and schema. sql-graphql also supports Federation. Internally it uses mercurius.

Platformatic DB allows you to load a Fastify plugin during server startup that contains your own application-specific code. +The plugin can add more routes or resolvers — these will automatically be shown in the OpenAPI and GraphQL schemas.

SQL database migrations are also supported. They're implemented internally with the postgrator library.

+ + + + \ No newline at end of file diff --git a/docs/0.42.0/getting-started/movie-quotes-app-tutorial/index.html b/docs/0.42.0/getting-started/movie-quotes-app-tutorial/index.html new file mode 100644 index 00000000000..85318fbafa3 --- /dev/null +++ b/docs/0.42.0/getting-started/movie-quotes-app-tutorial/index.html @@ -0,0 +1,129 @@ + + + + + +Movie Quotes App Tutorial | Platformatic Open Source Software + + + + + +
+
Version: 0.42.0

Movie Quotes App Tutorial

This tutorial will help you learn how to build a full stack application on top +of Platformatic DB. We're going to build an application that allows us to +save our favourite movie quotes. We'll also be building in custom API functionality +that allows for some neat user interaction on our frontend.

You can find the complete code for the application that we're going to build +on GitHub.

note

We'll be building the frontend of our application with the Astro +framework, but the GraphQL API integration steps that we're going to cover can +be applied with most frontend frameworks.

What we're going to cover

In this tutorial we'll learn how to:

  • Create a Platformatic API
  • Apply database migrations
  • Create relationships between our API entities
  • Populate our database tables
  • Build a frontend application that integrates with our GraphQL API
  • Extend our API with custom functionality
  • Enable CORS on our Platformatic API

Prerequisites

To follow along with this tutorial you'll need to have these things installed:

You'll also need to have some experience with JavaScript, and be comfortable with +running commands in a terminal.

Build the backend

Create a Platformatic API

First, let's create our project directory:

mkdir -p tutorial-movie-quotes-app/apps/movie-quotes-api/

cd tutorial-movie-quotes-app/apps/movie-quotes-api/

Run this command in your terminal to start the Platformatic creator wizard:

npm create platformatic@latest

This interactive command-line tool will ask you some questions about how you'd +like to set up your new Platformatic project. For this guide, select these options:

- Which kind of project do you want to create?  => DB
- Where would you like to create your project? => quick-start
- Do you want to create default migrations? => Yes
- Do you want to create a plugin? => Yes
- Do you want to use TypeScript? => No
- Do you want to install dependencies? => Yes (this can take a while)
- Do you want to apply the migrations? => Yes
- Do you want to generate types? => Yes
- Do you want to create the github action to deploy this application to Platformatic Cloud dynamic workspace? => No
- Do you want to create the github action to deploy this application to Platformatic Cloud static workspace? => No

Once the wizard is complete, you'll have a Platformatic app project in the +folder quick-start, with example migration files and a plugin script.

info

Make sure you run the npm/yarn/pnpm install command manually if you don't ask the wizard to do it for you.

Define the database schema

Let's create a new directory to store our migration files:

mkdir migrations

Then we'll create a migration file named 001.do.sql in the migrations +directory:

CREATE TABLE quotes (
id INTEGER PRIMARY KEY,
quote TEXT NOT NULL,
said_by VARCHAR(255) NOT NULL,
created_at DATETIME DEFAULT CURRENT_TIMESTAMP
);

Now let's setup migrations in our Platformatic configuration +file, platformatic.db.json:

{
"$schema": "https://platformatic.dev/schemas/v0.23.2/db",
"server": {
"hostname": "{PLT_SERVER_HOSTNAME}",
"port": "{PORT}",
"logger": {
"level": "{PLT_SERVER_LOGGER_LEVEL}"
}
},
"db": {
"connectionString": "{DATABASE_URL}",
"graphql": true,
"openapi": true
},
"plugins": {
"paths": [
"plugin.js"
]
},
"types": {
"autogenerate": true
},
"migrations": {
"dir": "migrations",
"autoApply": true
}
}
info

Take a look at the Configuration reference +to see all the supported configuration settings.

Now we can start the Platformatic DB server:

npm run start

Our Platformatic DB server should start, and we'll see messages like these:

[11:26:48.772] INFO (15235): running 001.do.sql
[11:26:48.864] INFO (15235): server listening
url: "http://127.0.0.1:3042"

Let's open a new terminal and make a request to our server's REST API that +creates a new quote:

curl --request POST --header "Content-Type: application/json" \
-d "{ \"quote\": \"Toto, I've got a feeling we're not in Kansas anymore.\", \"saidBy\": \"Dorothy Gale\" }" \
http://localhost:3042/quotes

We should receive a response like this from the API:

{"id":1,"quote":"Toto, I've got a feeling we're not in Kansas anymore.","saidBy":"Dorothy Gale","createdAt":"1684167422600"}

Create an entity relationship

Now let's create a migration file named 002.do.sql in the migrations +directory:

CREATE TABLE movies (
id INTEGER PRIMARY KEY,
name TEXT NOT NULL UNIQUE
);

ALTER TABLE quotes ADD COLUMN movie_id INTEGER REFERENCES movies(id);

This SQL will create a new movies database table and also add a movie_id +column to the quotes table. This will allow us to store movie data in the +movies table and then reference them by ID in our quotes table.

Let's stop the Platformatic DB server with Ctrl + C, and then start it again:

npm run start

The new migration should be automatically applied and we'll see the log message +running 002.do.sql.

Our Platformatic DB server also provides a GraphQL API. Let's open up the GraphiQL +application in our web browser:

http://localhost:3042/graphiql

Now let's run this query with GraphiQL to add the movie for the quote that we +added earlier:

mutation {
saveMovie(input: { name: "The Wizard of Oz" }) {
id
}
}

We should receive a response like this from the API:

{
"data": {
"saveMovie": {
"id": "1"
}
}
}

Now we can update our quote to reference the movie:

mutation {
saveQuote(input: { id: 1, movieId: 1 }) {
id
quote
saidBy
createdAt
movie {
id
name
}
}
}

We should receive a response like this from the API:

{
"data": {
"saveQuote": {
"id": "1",
"quote": "Toto, I've got a feeling we're not in Kansas anymore.",
"saidBy": "Dorothy Gale",
"movie": {
"id": "1",
"name": "The Wizard of Oz"
}
}
}
}

Our Platformatic DB server has automatically identified the relationship +between our quotes and movies database tables. This allows us to make +GraphQL queries that retrieve quotes and their associated movies at the same +time. For example, to retrieve all quotes from our database we can run:

query {
quotes {
id
quote
saidBy
createdAt
movie {
id
name
}
}
}

To view the GraphQL schema that's generated for our API by Platformatic DB, +we can run this command in our terminal:

npx platformatic db schema graphql

The GraphQL schema shows all of the queries and mutations that we can run +against our GraphQL API, as well as the types of data that it expects as input.

Populate the database

Our movie quotes database is looking a little empty! We're going to create a +"seed" script to populate it with some data.

Let's create a new file named seed.js and copy and paste in this code:

'use strict'

const quotes = [
{
quote: "Toto, I've got a feeling we're not in Kansas anymore.",
saidBy: 'Dorothy Gale',
movie: 'The Wizard of Oz'
},
{
quote: "You're gonna need a bigger boat.",
saidBy: 'Martin Brody',
movie: 'Jaws'
},
{
quote: 'May the Force be with you.',
saidBy: 'Han Solo',
movie: 'Star Wars'
},
{
quote: 'I have always depended on the kindness of strangers.',
saidBy: 'Blanche DuBois',
movie: 'A Streetcar Named Desire'
}
]

module.exports = async function ({ entities, db, sql }) {
for (const values of quotes) {
const movie = await entities.movie.save({ input: { name: values.movie } })

console.log('Created movie:', movie)

const quote = {
quote: values.quote,
saidBy: values.saidBy,
movieId: movie.id
}

await entities.quote.save({ input: quote })

console.log('Created quote:', quote)
}
}
info

Take a look at the Seed a Database guide to learn more +about how database seeding works with Platformatic DB.

Let's stop our Platformatic DB server running and remove our SQLite database:

rm db.sqlite

Now let's create a fresh SQLite database by running our migrations:

npx platformatic db migrations apply

And then let's populate the quotes and movies tables with data using our +seed script:

npx platformatic db seed seed.js

Our database is full of data, but we don't have anywhere to display it. It's +time to start building our frontend!

Build the frontend

We're now going to use Astro to build our frontend +application. If you've not used it before, you might find it helpful +to read this overview +on how Astro components are structured.

tip

Astro provide some extensions and tools to help improve your +Editor Setup when building an +Astro application.

Create an Astro application

In the root tutorial-movie-quotes-app directory of our project, let's create a new directory for our frontend application:

mkdir -p apps/movie-quotes-frontend/

cd apps/movie-quotes-frontend/

And then we'll create a new Astro project:

npm create astro@latest -- --template basics

It will ask you some questions about how you'd like to set up +your new Astro project. For this guide, select these options:

Where should we create your new project?

   .
◼ tmpl Using basics as project template
✔ Template copied

Install dependencies? (it's buggy, we'll do it afterwards)

   No
◼ No problem! Remember to install dependencies after setup.

Do you plan to write TypeScript?

   No
◼ No worries! TypeScript is supported in Astro by default, but you are free to continue writing JavaScript instead.

Initialize a new git repository?

   No
◼ Sounds good! You can always run git init manually.

Liftoff confirmed. Explore your project!
Run npm run dev to start the dev server. CTRL+C to stop.
Add frameworks like react or tailwind using astro add.

Now we'll edit our Astro configuration file, astro.config.mjs and +copy and paste in this code:

import { defineConfig } from 'astro/config'

// https://astro.build/config
export default defineConfig({
output: 'server'
})

And we'll also edit our tsconfig.json file and add in this configuration:

{
"extends": "astro/tsconfigs/base",
"compilerOptions": {
"types": ["astro/client"]
}
}

Now we can start up the Astro development server with:

npm run dev

And then load up the frontend in our browser at http://localhost:3000

Now that everything is working, we'll remove all default *.astro files from the src/ directory, but we'll keep the directory structure. You can delete them now, or override them later.

Create a layout

In the src/layouts directory, let's create a new file named Layout.astro:

---
export interface Props {
title: string;
page?: string;
}
const { title, page } = Astro.props;
---

<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8" />
<meta name="viewport" content="width=device-width" />
<title>{title}</title>
</head>
<body>
<header>
<h1>🎬 Movie Quotes</h1>
</header>
<nav>
<a href="/">All quotes</a>
</nav>
<section>
<slot />
</section>
</body>
</html>

The code between the --- is known as the component script, and the code after that is the component template. The component script will only run on the server side when a web browser makes a request. The component template is rendered server side and sent back as an HTML response to the web browser.

Now we'll update src/pages/index.astro to use this Layout component. Let's replace the contents of src/pages/index.astro with this code:

---
import Layout from '../layouts/Layout.astro';
---

<Layout title="All quotes" page="listing">
<main>
<p>We'll list all the movie quotes here.</p>
</main>
</Layout>

Integrate the urql GraphQL client

We're now going to integrate the URQL GraphQL client into our frontend application. This will allow us to run queries and mutations against our Platformatic GraphQL API.

Let's first install @urql/core and graphql as project dependencies:

npm install @urql/core graphql

Then let's create a new .env file and add this configuration:

PUBLIC_GRAPHQL_API_ENDPOINT=http://127.0.0.1:3042/graphql

Now we'll create a new directory:

mkdir src/lib

And then create a new file named src/lib/quotes-api.js. In that file we'll create a new URQL client:

// src/lib/quotes-api.js

import { createClient, cacheExchange, fetchExchange } from '@urql/core';

const graphqlClient = createClient({
url: import.meta.env.PUBLIC_GRAPHQL_API_ENDPOINT,
requestPolicy: "network-only",
exchanges: [cacheExchange, fetchExchange]
});

We'll also add a thin wrapper around the client that does some basic error handling for us:

// src/lib/quotes-api.js

async function graphqlClientWrapper(method, gqlQuery, queryVariables = {}) {
const queryResult = await graphqlClient[method](
gqlQuery,
queryVariables
).toPromise();

if (queryResult.error) {
console.error("GraphQL error:", queryResult.error);
}

return {
data: queryResult.data,
error: queryResult.error,
};
}

export const quotesApi = {
async query(gqlQuery, queryVariables = {}) {
return await graphqlClientWrapper("query", gqlQuery, queryVariables);
},
async mutation(gqlQuery, queryVariables = {}) {
return await graphqlClientWrapper("mutation", gqlQuery, queryVariables);
}
}

And lastly, we'll export gql from the @urql/core package, to make it simpler for us to write GraphQL queries in our pages:

// src/lib/quotes-api.js

export { gql } from "@urql/core";

Stop the Astro dev server and then start it again so it picks up the .env file:

npm run dev

Display all quotes

Let's display all the movie quotes in src/pages/index.astro.

First, we'll update the component script at the top and add in a query to our GraphQL API for quotes:

---
import Layout from '../layouts/Layout.astro';
import { quotesApi, gql } from '../lib/quotes-api';

const { data } = await quotesApi.query(gql`
query {
quotes {
id
quote
saidBy
createdAt
movie {
id
name
}
}
}
`);

const quotes = data?.quotes || [];
---

Then we'll update the component template to display the quotes:

<Layout title="All quotes" page="listing">
<main>
{quotes.length > 0 ? quotes.map((quote) => (
<div>
<blockquote>
<p>{quote.quote}</p>
</blockquote>
<p>
{quote.saidBy}, {quote.movie?.name}
</p>
<div>
<span>Added {new Date(Number(quote.createdAt)).toUTCString()}</span>
</div>
</div>
)) : (
<p>No movie quotes have been added.</p>
)}
</main>
</Layout>

And just like that, we have all the movie quotes displaying on the page!

Integrate Tailwind for styling

Automatically add the @astrojs/tailwind integration:

npx astro add tailwind --yes

Add the Tailwind CSS Typography and Forms plugins:

npm install --save-dev @tailwindcss/typography @tailwindcss/forms

Import the plugins in our Tailwind configuration file:

// tailwind.config.cjs

/** @type {import('tailwindcss').Config} */
module.exports = {
content: ['./src/**/*.{astro,html,js,jsx,md,mdx,svelte,ts,tsx,vue}'],
theme: {
extend: {}
},
plugins: [
require('@tailwindcss/forms'),
require('@tailwindcss/typography')
]
}

Stop the Astro dev server and then start it again so it picks up all the configuration changes:

npm run dev

Style the listing page

To style our listing page, let's add CSS classes to the component template in src/layouts/Layout.astro:

---
export interface Props {
title: string;
page?: string;
}

const { title, page } = Astro.props;

const navActiveClasses = "font-bold bg-yellow-400 no-underline";
---

<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8" />
<meta name="viewport" content="width=device-width" />
<title>{title}</title>
</head>
<body class="py-8">
<header class="prose mx-auto mb-6">
<h1>🎬 Movie Quotes</h1>
</header>
<nav class="prose mx-auto mb-6 border-y border-gray-200 flex">
<a href="/" class={`p-3 ${page === "listing" && navActiveClasses}`}>All quotes</a>
</nav>
<section class="prose mx-auto">
<slot />
</section>
</body>
</html>

Then let's add CSS classes to the component template in src/pages/index.astro:

<Layout title="All quotes">
<main>
{quotes.length > 0 ? quotes.map((quote) => (
<div class="border-b mb-6">
<blockquote class="text-2xl mb-0">
<p class="mb-4">{quote.quote}</p>
</blockquote>
<p class="text-xl mt-0 mb-8 text-gray-400">
{quote.saidBy}, {quote.movie?.name}
</p>
<div class="flex flex-col mb-6 text-gray-400">
<span class="text-gray-400 italic">Added {new Date(Number(quote.createdAt)).toUTCString()}</span>
</div>
</div>
)) : (
<p>No movie quotes have been added.</p>
)}
</main>
</Layout>

Our listing page is now looking much more user friendly!

Create an add quote page

We're going to create a form component that we can use for adding and editing quotes.

First let's create a new component file, src/components/QuoteForm.astro:

---
export interface QuoteFormData {
id?: number;
quote?: string;
saidBy?: string;
movie?: string;
}

export interface Props {
action: string;
values?: QuoteFormData;
saveError?: boolean;
loadError?: boolean;
submitLabel: string;
}

const { action, values = {}, saveError, loadError, submitLabel } = Astro.props;
---

{saveError && <p class="text-lg bg-red-200 p-4">There was an error saving the quote. Please try again.</p>}
{loadError && <p class="text-lg bg-red-200 p-4">There was an error loading the quote. Please try again.</p>}

<form method="post" action={action} class="grid grid-cols-1 gap-6">
<label for="quote" class="block">
<span>Quote</span>
<textarea id="quote" name="quote" required="required" class="mt-1 w-full">{values.quote}</textarea>
</label>
<label for="said-by" class="block">
<span>Said by</span>
<input type="text" id="said-by" name="saidBy" required="required" value={values.saidBy} class="mt-1 w-full">
</label>
<label for="movie" class="block">
<span>Movie</span>
<input type="text" id="movie" name="movie" required="required" autocomplete="off" value={values.movie} class="form-input mt-1 w-full">
</label>
<input type="submit" value={submitLabel} disabled={loadError && "disabled"} class="bg-yellow-400 hover:bg-yellow-500 text-gray-900 round p-3" />
</form>

Create a new page file, src/pages/add.astro:

---
import Layout from '../layouts/Layout.astro';
import QuoteForm from '../components/QuoteForm.astro';
import type { QuoteFormData } from '../components/QuoteForm.astro';

let formData: QuoteFormData = {};
let saveError = false;
---

<Layout title="Add a movie quote" page="add">
<main>
<h2>Add a quote</h2>
<QuoteForm action="/add" values={formData} saveError={saveError} submitLabel="Add quote" />
</main>
</Layout>

And now let's add a link to this page in the layout navigation in src/layouts/Layout.astro:

<nav class="prose mx-auto mb-6 border-y border-gray-200 flex">
<a href="/" class={`p-3 ${page === "listing" && navActiveClasses}`}>All quotes</a>
<a href="/add" class={`p-3 ${page === "add" && navActiveClasses}`}>Add a quote</a>
</nav>

Send form data to the API

When a user submits the add quote form we want to send the form data to our API so it can then save it to our database. Let's wire that up now.

First we're going to create a new file, src/lib/request-utils.js:

export function isPostRequest (request) {
return request.method === 'POST'
}

export async function getFormData (request) {
const formData = await request.formData()

return Object.fromEntries(formData.entries())
}

Then let's update the component script in src/pages/add.astro to use these new request utility functions:

---
import Layout from '../layouts/Layout.astro';
import QuoteForm from '../components/QuoteForm.astro';
import type { QuoteFormData } from '../components/QuoteForm.astro';

import { isPostRequest, getFormData } from '../lib/request-utils';

let formData: QuoteFormData = {};
let saveError = false;

if (isPostRequest(Astro.request)) {
formData = await getFormData(Astro.request);
}
---

When we create a new quote entity record via our API, we need to include a movieId field that references a movie entity record. This means that when a user submits the add quote form we need to:

  • Check if a movie entity record already exists with that movie name
  • Return the movie id if it does exist
  • If it doesn't exist, create a new movie entity record and return the movie ID

Let's update the import statement at the top of src/lib/quotes-api.js

-import { createClient } from '@urql/core'
+import { createClient, gql } from '@urql/core'

And then add a new method that will return a movie ID for us:

async function getMovieId (movieName) {
movieName = movieName.trim()

let movieId = null

// Check if a movie already exists with the provided name.
const queryMoviesResult = await quotesApi.query(
gql`
query ($movieName: String!) {
movies(where: { name: { eq: $movieName } }) {
id
}
}
`,
{ movieName }
)

if (queryMoviesResult.error) {
return null
}

const movieExists = queryMoviesResult.data?.movies.length === 1
if (movieExists) {
movieId = queryMoviesResult.data.movies[0].id
} else {
// Create a new movie entity record.
const saveMovieResult = await quotesApi.mutation(
gql`
mutation ($movieName: String!) {
saveMovie(input: { name: $movieName }) {
id
}
}
`,
{ movieName }
)

if (saveMovieResult.error) {
return null
}

movieId = saveMovieResult.data?.saveMovie.id
}

return movieId
}

And let's export it too:

export const quotesApi = {
async query (gqlQuery, queryVariables = {}) {
return await graphqlClientWrapper('query', gqlQuery, queryVariables)
},
async mutation (gqlQuery, queryVariables = {}) {
return await graphqlClientWrapper('mutation', gqlQuery, queryVariables)
},
getMovieId
}

Now we can wire up the last parts in the src/pages/add.astro component script:

---
import Layout from '../layouts/Layout.astro';
import QuoteForm from '../components/QuoteForm.astro';
import type { QuoteFormData } from '../components/QuoteForm.astro';

import { quotesApi, gql } from '../lib/quotes-api';
import { isPostRequest, getFormData } from '../lib/request-utils';

let formData: QuoteFormData = {};
let saveError = false;

if (isPostRequest(Astro.request)) {
formData = await getFormData(Astro.request);

const movieId = await quotesApi.getMovieId(formData.movie);

if (movieId) {
const quote = {
quote: formData.quote,
saidBy: formData.saidBy,
movieId,
};

const { error } = await quotesApi.mutation(gql`
mutation($quote: QuoteInput!) {
saveQuote(input: $quote) {
id
}
}
`, { quote });

if (!error) {
return Astro.redirect('/');
} else {
saveError = true;
}
} else {
saveError = true;
}
}

Add autosuggest for movies

We can create a better experience for our users by autosuggesting the movie name when they're adding a new quote.

Let's open up src/components/QuoteForm.astro and import our API helper methods in the component script:

import { quotesApi, gql } from '../lib/quotes-api.js';

Then let's add in a query to our GraphQL API for all movies:

const { data } = await quotesApi.query(gql`
query {
movies {
name
}
}
`);

const movies = data?.movies || [];

Now let's update the Movie field in the component template to use the array of movies that we've retrieved from the API:

<label for="movie" class="block">
<span>Movie</span>
<input list="movies" id="movie" name="movie" required="required" autocomplete="off" value={values.movie} class="form-input mt-1 w-full">
<datalist id="movies">
{movies.map(({ name }) => (
<option>{name}</option>
))}
</datalist>
</label>

Create an edit quote page

Let's create a new directory, src/pages/edit/:

mkdir src/pages/edit/

And inside of it, let's create a new page, [id].astro:

---
import Layout from '../../layouts/Layout.astro';
import QuoteForm, { QuoteFormData } from '../../components/QuoteForm.astro';

const id = Number(Astro.params.id);

let formValues: QuoteFormData = {};
let loadError = false;
let saveError = false;
---

<Layout title="Edit movie quote">
<main>
<h2>Edit quote</h2>
<QuoteForm action={`/edit/${id}`} values={formValues} saveError={saveError} loadError={loadError} submitLabel="Update quote" />
</main>
</Layout>

You'll see that we're using the same QuoteForm component that our add quote page uses. Now we're going to wire up our edit page so that it can load an existing quote from our API and save changes back to the API when the form is submitted.

In the [id].astro component script, let's add some code to take care of these tasks:

---
import Layout from '../../layouts/Layout.astro';
import QuoteForm, { QuoteFormData } from '../../components/QuoteForm.astro';

import { quotesApi, gql } from '../../lib/quotes-api';
import { isPostRequest, getFormData } from '../../lib/request-utils';

const id = Number(Astro.params.id);

let formValues: QuoteFormData = {};
let loadError = false;
let saveError = false;

if (isPostRequest(Astro.request)) {
const formData = await getFormData(Astro.request);
formValues = formData;

const movieId = await quotesApi.getMovieId(formData.movie);

if (movieId) {
const quote = {
id,
quote: formData.quote,
saidBy: formData.saidBy,
movieId,
};

const { error } = await quotesApi.mutation(gql`
mutation($quote: QuoteInput!) {
saveQuote(input: $quote) {
id
}
}
`, { quote });

if (!error) {
return Astro.redirect('/');
} else {
saveError = true;
}
} else {
saveError = true;
}
} else {
const { data } = await quotesApi.query(gql`
query($id: ID!) {
getQuoteById(id: $id) {
id
quote
saidBy
movie {
id
name
}
}
}
`, { id });

if (data?.getQuoteById) {
formValues = {
...data.getQuoteById,
movie: data.getQuoteById.movie.name
};
} else {
loadError = true;
}
}
---

Load up http://localhost:3000/edit/1 in your browser to test out the edit quote page.

Now we're going to add edit links to the quotes listing page. Let's start by creating a new component, src/components/QuoteActionEdit.astro:

---
export interface Props {
id: number;
}

const { id } = Astro.props;
---
<a href={`/edit/${id}`} class="flex items-center mr-5 text-gray-400 hover:text-yellow-600 underline decoration-yellow-600 decoration-2 underline-offset-4">
<svg class="w-6 h-6 mr-1" xmlns="http://www.w3.org/2000/svg" viewBox="0 0 24 24" fill="currentColor" class="w-6 h-6">
<path d="M21.731 2.269a2.625 2.625 0 00-3.712 0l-1.157 1.157 3.712 3.712 1.157-1.157a2.625 2.625 0 000-3.712zM19.513 8.199l-3.712-3.712-8.4 8.4a5.25 5.25 0 00-1.32 2.214l-.8 2.685a.75.75 0 00.933.933l2.685-.8a5.25 5.25 0 002.214-1.32l8.4-8.4z" />
<path d="M5.25 5.25a3 3 0 00-3 3v10.5a3 3 0 003 3h10.5a3 3 0 003-3V13.5a.75.75 0 00-1.5 0v5.25a1.5 1.5 0 01-1.5 1.5H5.25a1.5 1.5 0 01-1.5-1.5V8.25a1.5 1.5 0 011.5-1.5h5.25a.75.75 0 000-1.5H5.25z" />
</svg>
<span class="hover:underline hover:decoration-yellow-600">Edit</span>
</a>

Then let's import this component and use it in our listing page, src/pages/index.astro:

---
import Layout from '../layouts/Layout.astro';
import QuoteActionEdit from '../components/QuoteActionEdit.astro';
import { quotesApi, gql } from '../lib/quotes-api';

// ...
---

<Layout title="All quotes" page="listing">
<main>
{quotes.length > 0 ? quotes.map((quote) => (
<div class="border-b mb-6">
...
<div class="flex flex-col mb-6 text-gray-400">
<span class="flex items-center">
<QuoteActionEdit id={quote.id} />
</span>
<span class="mt-4 text-gray-400 italic">Added {new Date(Number(quote.createdAt)).toUTCString()}</span>
</div>
</div>
)) : (
<p>No movie quotes have been added.</p>
)}
</main>
</Layout>

Add delete quote functionality

Our Movie Quotes app can create, retrieve and update quotes. Now we're going to implement the D in CRUD — delete!

First let's create a new component, src/components/QuoteActionDelete.astro:

---
export interface Props {
id: number;
}

const { id } = Astro.props;
---
<form method="POST" action={`/delete/${id}`} class="form-delete-quote m-0">
<button type="submit" class="flex items-center text-gray-400 hover:text-red-700 underline decoration-red-700 decoration-2 underline-offset-4">
<svg class="w-6 h-6 mr-1" xmlns="http://www.w3.org/2000/svg" viewBox="0 0 24 24" fill="currentColor" class="w-6 h-6">
<path fill-rule="evenodd" d="M12 2.25c-5.385 0-9.75 4.365-9.75 9.75s4.365 9.75 9.75 9.75 9.75-4.365 9.75-9.75S17.385 2.25 12 2.25zm-1.72 6.97a.75.75 0 10-1.06 1.06L10.94 12l-1.72 1.72a.75.75 0 101.06 1.06L12 13.06l1.72 1.72a.75.75 0 101.06-1.06L13.06 12l1.72-1.72a.75.75 0 10-1.06-1.06L12 10.94l-1.72-1.72z" clip-rule="evenodd" />
</svg>
<span>Delete</span>
</button>
</form>

And then we'll drop it into our listing page, src/pages/index.astro:

---
import Layout from '../layouts/Layout.astro';
import QuoteActionEdit from '../components/QuoteActionEdit.astro';
import QuoteActionDelete from '../components/QuoteActionDelete.astro';
import { quotesApi, gql } from '../lib/quotes-api';

// ...
---

<Layout title="All quotes" page="listing">
<main>
{quotes.length > 0 ? quotes.map((quote) => (
<div class="border-b mb-6">
...
<div class="flex flex-col mb-6 text-gray-400">
<span class="flex items-center">
<QuoteActionEdit id={quote.id} />
<QuoteActionDelete id={quote.id} />
</span>
<span class="mt-4 text-gray-400 italic">Added {new Date(Number(quote.createdAt)).toUTCString()}</span>
</div>
</div>
...

At the moment when a delete form is submitted from our listing page, we get an Astro 404 page. Let's fix this by creating a new directory, src/pages/delete/:

mkdir src/pages/delete/

And inside of it, let's create a new page, [id].astro:

---
import Layout from '../../layouts/Layout.astro';

import { quotesApi, gql } from '../../lib/quotes-api';
import { isPostRequest } from '../../lib/request-utils';

if (isPostRequest(Astro.request)) {
const id = Number(Astro.params.id);

const { error } = await quotesApi.mutation(gql`
mutation($id: ID!) {
deleteQuotes(where: { id: { eq: $id }}) {
id
}
}
`, { id });

if (!error) {
return Astro.redirect('/');
}
}
---
<Layout title="Delete movie quote">
<main>
<h2>Delete quote</h2>
<p class="text-lg bg-red-200 p-4">There was an error deleting the quote. Please try again.</p>
</main>
</Layout>

Now if we click on a delete quote button on our listing page, it should call our GraphQL API to delete the quote. To make this a little more user friendly, let's add in a confirmation dialog so that users don't delete a quote by accident.

Let's create a new directory, src/scripts/:

mkdir src/scripts/

And inside of that directory let's create a new file, quote-actions.js:

// src/scripts/quote-actions.js

export function confirmDeleteQuote (form) {
if (confirm('Are you sure want to delete this quote?')) {
form.submit()
}
}

Then we can pull it in as client side JavaScript on our listing page, src/pages/index.astro:

<Layout>
...
</Layout>

<script>
import { confirmDeleteQuote } from '../scripts/quote-actions.js'

addEventListener('DOMContentLoaded', () => {
document.querySelectorAll('.form-delete-quote').forEach((deleteForm) => {
deleteForm.addEventListener('submit', (event) => {
event.preventDefault()
confirmDeleteQuote(event.currentTarget)
})
})
})
</script>

Build a "like" quote feature

We've built all the basic CRUD (Create, Retrieve, Update & Delete) features into our application. Now let's build a feature so that users can interact and "like" their favourite movie quotes.

To build this feature we're going to add custom functionality to our API and then add a new component, along with some client side JavaScript, to our frontend.

Create an API migration

We're now going to work on the code for the API, under the apps/movie-quotes-api directory.

First let's create a migration that adds a likes column to our quotes database table. We'll create a new migration file, migrations/003.do.sql:

ALTER TABLE quotes ADD COLUMN likes INTEGER default 0;

This migration will automatically be applied when we next start our Platformatic API.

Create an API plugin

To add custom functionality to our Platformatic API, we need to create a Fastify plugin and update our API configuration to use it.

Let's create a new file, plugin.js, and inside it we'll add the skeleton structure for our plugin:

// plugin.js

'use strict'

module.exports = async function plugin (app) {
app.log.info('plugin loaded')
}

Now let's register our plugin in our API configuration file, platformatic.db.json:

{
...
"migrations": {
"dir": "./migrations"
},
"plugins": {
"paths": ["./plugin.js"]
}
}

And then we'll start up our Platformatic API:

npm run dev

We should see log messages that tell us that our new migration has been applied and our plugin has been loaded:

[10:09:20.052] INFO (146270): running 003.do.sql
[10:09:20.129] INFO (146270): plugin loaded
[10:09:20.209] INFO (146270): server listening
url: "http://127.0.0.1:3042"

Now it's time to start adding some custom functionality inside our plugin.

Add a REST API route

We're going to add a REST route to our API that increments the count of likes for a specific quote: /quotes/:id/like

First let's add fluent-json-schema as a dependency for our API:

npm install fluent-json-schema

We'll use fluent-json-schema to help us generate a JSON Schema. We can then use this schema to validate the request path parameters for our route (id).

tip

You can use fastify-type-provider-typebox or typebox if you want to convert your JSON Schema into a Typescript type. See this GitHub thread to have a better overview about it. Look at the example below to have a better overview.

Here you can see in practice how to leverage typebox combined with fastify-type-provider-typebox:

import { FastifyInstance } from "fastify";
import { Static, Type } from "@sinclair/typebox";
import { TypeBoxTypeProvider } from "@fastify/type-provider-typebox";

/**
* Creation of the JSON schema needed to validate the params passed to the route
*/
const schemaParams = Type.Object({
num1: Type.Number(),
num2: Type.Number(),
});

/**
* We convert the JSON schema to the TypeScript type, in this case:
* {
num1: number;
num2: number;
}
*/
type Params = Static<typeof schemaParams>;

/**
* Here we can pass the type previously created to our synchronous unit function
*/
const multiplication = ({ num1, num2 }: Params) => num1 * num2;

export default async function (app: FastifyInstance) {
app.withTypeProvider<TypeBoxTypeProvider>().get(
"/multiplication/:num1/:num2",
{ schema: { params: schemaParams } },
/**
* Since we leverage `withTypeProvider<TypeBoxTypeProvider>()`,
* we no longer need to explicitly define the `params`.
* They will be automatically inferred as:
* {
num1: number;
num2: number;
}
*/
({ params }) => multiplication(params)
);
}

Now let's add our REST API route in plugin.js:

'use strict'

const S = require('fluent-json-schema')

module.exports = async function plugin (app) {
app.log.info('plugin loaded')

// This JSON Schema will validate the request path parameters.
// It reuses part of the schema that Platformatic DB has
// automatically generated for our Quote entity.
const schema = {
params: S.object().prop('id', app.getSchema('Quote').properties.id)
}

app.post('/quotes/:id/like', { schema }, async function (request, response) {
return {}
})
}

We can now make a POST request to our new API route:

curl --request POST http://localhost:3042/quotes/1/like
info

Learn more about how validation works in the Fastify validation documentation.

Our API route is currently returning an empty object ({}). Let's wire things up so that it increments the number of likes for the quote with the specified ID. To do this we'll add a new function inside of our plugin:

module.exports = async function plugin (app) {
app.log.info('plugin loaded')

async function incrementQuoteLikes (id) {
const { db, sql } = app.platformatic

const result = await db.query(sql`
UPDATE quotes SET likes = likes + 1 WHERE id=${id} RETURNING likes
`)

return result[0]?.likes
}

// ...
}

And then we'll call that function in our route handler function:

app.post('/quotes/:id/like', { schema }, async function (request, response) {
return { likes: await incrementQuoteLikes(request.params.id) }
})

Now when we make a POST request to our API route:

curl --request POST http://localhost:3042/quotes/1/like

We should see that the likes value for the quote is incremented every time we make a request to the route.

{"likes":1}

Add a GraphQL API mutation

We can add a likeQuote mutation to our GraphQL API by reusing the incrementQuoteLikes function that we just created.

Let's add this code at the end of our plugin, inside plugin.js:

module.exports = async function plugin (app) {
// ...

app.graphql.extendSchema(`
extend type Mutation {
likeQuote(id: ID!): Int
}
`)

app.graphql.defineResolvers({
Mutation: {
likeQuote: async (_, { id }) => await incrementQuoteLikes(id)
}
})
}

The code we've just added extends our API's GraphQL schema and defines a corresponding resolver for the likeQuote mutation.

We can now load up GraphiQL in our web browser and try out our new likeQuote mutation with this GraphQL query:

mutation {
likeQuote(id: 1)
}
info

Learn more about how to extend the GraphQL schema and define resolvers in the Mercurius API documentation.

Enable CORS on the API

When we build "like" functionality into our frontend, we'll be making a client side HTTP request to our GraphQL API. Our backend API and our frontend are running on different origins, so we need to configure our API to allow requests from the frontend. This is known as Cross-Origin Resource Sharing (CORS).

To enable CORS on our API, let's open up our API's .env file and add in a new setting:

PLT_SERVER_CORS_ORIGIN=http://localhost:3000

The value of PLT_SERVER_CORS_ORIGIN is our frontend application's origin.

Now we can add a cors configuration object in our API's configuration file, platformatic.db.json:

{
"server": {
"logger": {
"level": "{PLT_SERVER_LOGGER_LEVEL}"
},
"hostname": "{PLT_SERVER_HOSTNAME}",
"port": "{PORT}",
"cors": {
"origin": "{PLT_SERVER_CORS_ORIGIN}"
}
},
...
}

The HTTP responses from all endpoints on our API will now include the header:

access-control-allow-origin: http://localhost:3000

This will allow JavaScript running on web pages under the http://localhost:3000 origin to make requests to our API.

Add like quote functionality

Now that our API supports "liking" a quote, let's integrate it as a feature in our frontend.

First we'll create a new component, src/components/QuoteActionLike.astro:

---
export interface Props {
id: number;
likes: number;
}

const { id, likes } = Astro.props;
---
<span data-quote-id={id} class="like-quote cursor-pointer mr-5 flex items-center">
<svg class="like-icon w-6 h-6 mr-2 text-red-600" xmlns="http://www.w3.org/2000/svg" fill="none" viewBox="0 0 24 24" stroke-width="1.5" stroke="currentColor" class="w-6 h-6">
<path stroke-linecap="round" stroke-linejoin="round" d="M21 8.25c0-2.485-2.099-4.5-4.688-4.5-1.935 0-3.597 1.126-4.312 2.733-.715-1.607-2.377-2.733-4.313-2.733C5.1 3.75 3 5.765 3 8.25c0 7.22 9 12 9 12s9-4.78 9-12z" />
</svg>
<span class="likes-count w-8">{likes}</span>
</span>

<style>
.like-quote:hover .like-icon,
.like-quote.liked .like-icon {
fill: currentColor;
}
</style>

And in our listing page, src/pages/index.astro, let's import our new component and add it into the interface:

---
import Layout from '../layouts/Layout.astro';
import QuoteActionEdit from '../components/QuoteActionEdit.astro';
import QuoteActionDelete from '../components/QuoteActionDelete.astro';
import QuoteActionLike from '../components/QuoteActionLike.astro';
import { quotesApi, gql } from '../lib/quotes-api';

// ...
---

<Layout title="All quotes" page="listing">
<main>
{quotes.length > 0 ? quotes.map((quote) => (
<div class="border-b mb-6">
...
<div class="flex flex-col mb-6 text-gray-400">
<span class="flex items-center">
<QuoteActionLike id={quote.id} likes={quote.likes} />
<QuoteActionEdit id={quote.id} />
<QuoteActionDelete id={quote.id} />
</span>
<span class="mt-4 text-gray-400 italic">Added {new Date(Number(quote.createdAt)).toUTCString()}</span>
</div>
</div>
...

Then let's update the GraphQL query in this component's script to retrieve the likes field for all quotes:

const { data } = await quotesApi.query(gql`
query {
quotes {
id
quote
saidBy
likes
createdAt
movie {
id
name
}
}
}
`);

Now that we have the likes showing for each quote, let's wire things up so that clicking on the like component for a quote will call our API and add a like.

Let's open up src/scripts/quote-actions.js and add a new function that makes a request to our GraphQL API:

import { quotesApi, gql } from '../lib/quotes-api.js'

export function confirmDeleteQuote (form) {
if (confirm('Are you sure want to delete this quote?')) {
form.submit()
}
}

export async function likeQuote (likeQuote) {
likeQuote.classList.add('liked')
likeQuote.classList.remove('cursor-pointer')

const id = Number(likeQuote.dataset.quoteId)

const { data } = await quotesApi.mutation(gql`
mutation($id: ID!) {
likeQuote(id: $id)
}
`, { id })

if (data?.likeQuote) {
likeQuote.querySelector('.likes-count').innerText = data.likeQuote
}
}

And then let's attach the likeQuote function to the click event for each like quote component on our listing page. We can do this by adding a little extra code inside the <script> block in src/pages/index.astro:

<script>
import { confirmDeleteQuote, likeQuote } from '../scripts/quote-actions.js'

addEventListener('DOMContentLoaded', () => {
document.querySelectorAll('.form-delete-quote').forEach((deleteForm) => {
deleteForm.addEventListener('submit', (event) => {
event.preventDefault()
confirmDeleteQuote(event.currentTarget)
})
})

document.querySelectorAll('.like-quote').forEach((container) => {
container.addEventListener('click', (event) => likeQuote(event.currentTarget), { once: true })
})
})
</script>

Sort the listing by top quotes

Now that users can like their favourite quotes, as a final step, we'll allow for sorting quotes on the listing page by the number of likes they have.

Let's update src/pages/index.astro to read a sort query string parameter and use it in the GraphQL query that we make to our API:

---
// ...

const allowedSortFields = ["createdAt", "likes"];
const searchParamSort = new URL(Astro.request.url).searchParams.get("sort");
const sort = allowedSortFields.includes(searchParamSort) ? searchParamSort : "createdAt";

const { data } = await quotesApi.query(gql`
query {
quotes(orderBy: {field: ${sort}, direction: DESC}) {
id
quote
saidBy
likes
createdAt
movie {
id
name
}
}
}
`);

const quotes = data?.quotes || [];
---
<Layout title="All quotes" page={`listing-${sort}`}>
...

Then let's replace the 'All quotes' link in the <nav> in src/layouts/Layout.astro +with two new links:

<nav class="prose mx-auto mb-6 border-y border-gray-200 flex">
<a href="/?sort=createdAt" class={`p-3 ${page === "listing-createdAt" && navActiveClasses}`}>Latest quotes</a>
<a href="/?sort=likes" class={`p-3 ${page === "listing-likes" && navActiveClasses}`}>Top quotes</a>
<a href="/add" class={`p-3 ${page === "add" && navActiveClasses}`}>Add a quote</a>
</nav>

With these few extra lines of code, our users can now sort quotes by when they +were created or by the number of likes that they have. Neat!

Wrapping up

And we're done — you now have the knowledge you need to build a full stack +application on top of Platformatic DB.

We can't wait to see what you'll build next!

+ + + + \ No newline at end of file diff --git a/docs/0.42.0/getting-started/new-api-project-instructions/index.html b/docs/0.42.0/getting-started/new-api-project-instructions/index.html new file mode 100644 index 00000000000..c90f46da27b --- /dev/null +++ b/docs/0.42.0/getting-started/new-api-project-instructions/index.html @@ -0,0 +1,20 @@ + + + + + +new-api-project-instructions | Platformatic Open Source Software + + + + + +
+
Version: 0.42.0

new-api-project-instructions

Run this command in your terminal to start the Platformatic creator wizard:

npm create platformatic@latest

This interactive command-line tool will ask you some questions about how you'd +like to set up your new Platformatic project. For this guide, select these options:

- Which kind of project do you want to create?  => DB
- Where would you like to create your project? => quick-start
- Do you want to create default migrations? => Yes
- Do you want to create a plugin? => Yes
- Do you want to use TypeScript? => No
- Do you want to install dependencies? => Yes (this can take a while)
- Do you want to apply the migrations? => Yes
- Do you want to generate types? => Yes
- Do you want to create the github action to deploy this application to Platformatic Cloud dynamic workspace? => No
- Do you want to create the github action to deploy this application to Platformatic Cloud static workspace? => No

Once the wizard is complete, you'll have a Platformatic app project in the +folder quick-start, with example migration files and a plugin script.

info

Make sure you run the npm/yarn/pnpm install command manually if you +don't ask the wizard to do it for you.

+ + + + \ No newline at end of file diff --git a/docs/0.42.0/getting-started/quick-start-guide/index.html b/docs/0.42.0/getting-started/quick-start-guide/index.html new file mode 100644 index 00000000000..9b8304414fa --- /dev/null +++ b/docs/0.42.0/getting-started/quick-start-guide/index.html @@ -0,0 +1,38 @@ + + + + + +Quick Start Guide | Platformatic Open Source Software + + + + + +
+
Version: 0.42.0

Quick Start Guide

In this guide you'll learn how to create and run your first API with +Platformatic DB. Let's get started!

info

This guide uses SQLite for the database, but +Platformatic DB also supports PostgreSQL, +MySQL and MariaDB databases.

Prerequisites

Platformatic supports macOS, Linux and Windows (WSL recommended).

To follow along with this guide you'll need to have these things installed:

Create a new API project

Automatic CLI

Run this command in your terminal to start the Platformatic creator wizard:

npm create platformatic@latest

This interactive command-line tool will ask you some questions about how you'd +like to set up your new Platformatic project. For this guide, select these options:

- Which kind of project do you want to create?  => DB
- Where would you like to create your project? => quick-start
- Do you want to create default migrations? => Yes
- Do you want to create a plugin? => Yes
- Do you want to use TypeScript? => No
- Do you want to install dependencies? => Yes (this can take a while)
- Do you want to apply the migrations? => Yes
- Do you want to generate types? => Yes
- Do you want to create the github action to deploy this application to Platformatic Cloud dynamic workspace? => No
- Do you want to create the github action to deploy this application to Platformatic Cloud static workspace? => No

Once the wizard is complete, you'll have a Platformatic app project in the +folder quick-start, with example migration files and a plugin script.

info

Make sure you run the npm/yarn/pnpm install command manually if you +don't ask the wizard to do it for you.

Start your API server

In your project directory, run this command to start your API server:

npm start

Your Platformatic API is now up and running! 🌟

This command will:

  • Automatically map your SQL database to REST and GraphQL API interfaces.
  • Start the Platformatic API server.

You can jump down to Next steps or read on to learn more about +the project files that the wizard has created for you.

Check the database schema

In your project directory (quick-start), open the migrations directory that can store your database migration files that will contain both the 001.do.sql and 001.undo.sql files. The 001.do.sql file contains the SQL statements to create the database objects, while the 001.undo.sql file contains the SQL statements to drop them.

migrations/001.do.sql
CREATE TABLE IF NOT EXISTS movies (
id INTEGER PRIMARY KEY,
title TEXT NOT NULL
);

Note that this migration has been already applied by Platformatic creator.

Check your API configuration

In your project directory, check the Platformatic configuration file named +platformatic.db.json and the environment file named .env:

The created configuration tells Platformatic to:

  • Run an API server on http://127.0.0.1:3042/
  • Connect to an SQLite database stored in a file named db.sqlite
  • Look for database migration files in the migrations directory
  • Load the plugin file named plugin.js and automatically generate types
tip

The Configuration reference explains all of the +supported configuration options.

Manual setup

Create a directory for your new API project:

mkdir quick-start

cd quick-start

Then create a package.json file and install the platformatic +CLI as a project dependency:

npm init --yes

npm install platformatic

Add a database schema

In your project directory (quick-start), create a file for your sqlite3 database and also, a migrations directory to +store your database migration files:

touch db.sqlite

mkdir migrations

Then create a new migration file named 001.do.sql in the migrations +directory.

Copy and paste this SQL query into the migration file:

migrations/001.do.sql
CREATE TABLE movies (
id INTEGER PRIMARY KEY,
title VARCHAR(255) NOT NULL
);

When it's run by Platformatic, this query will create a new database table +named movies.

tip

You can check syntax for SQL queries on the Database.Guide SQL Reference.

Configure your API

In your project directory, create a new Platformatic configuration file named +platformatic.db.json.

Copy and paste in this configuration:

platformatic.db.json
{
"server": {
"hostname": "127.0.0.1",
"port": "3042"
},
"db": {
"connectionString": "sqlite://./db.sqlite"
},
"migrations": {
"dir": "./migrations",
"autoApply": "true"
}
}

This configuration tells Platformatic to:

  • Run an API server on http://127.0.0.1:3042/
  • Connect to an SQLite database stored in a file named db.sqlite
  • Look for, and apply the database migrations specified in the migrations directory
tip

The Configuration reference explains all of the +supported configuration options.

Start your API server

In your project directory, use the Platformatic CLI to start your API server:

npx platformatic db start

This will:

  • Automatically map your SQL database to REST and GraphQL API interfaces.
  • Start the Platformatic API server.

Your Platformatic API is now up and running! 🌟

Next steps

Use the REST API interface

You can use cURL to make requests to the REST interface of your API, for example:

Create a new movie

curl -X POST -H "Content-Type: application/json" \
-d "{ \"title\": \"Hello Platformatic DB\" }" \
http://localhost:3042/movies

You should receive a response from your API like this:

{"id":1,"title":"Hello Platformatic DB"}

Get all movies

curl http://localhost:3042/movies

You should receive a response from your API like this, with an array +containing all the movies in your database:

[{"id":1,"title":"Hello Platformatic DB"}]
tip

If you would like to know more about what routes are automatically available, +take a look at the REST API reference +for an overview of the REST interface that the generated API provides.

Swagger OpenAPI documentation

You can explore the OpenAPI documentation for your REST API in the Swagger UI at +http://localhost:3042/documentation

Use the GraphQL API interface

Open http://localhost:3042/graphiql in your +web browser to explore the GraphQL interface of your API.

Try out this GraphQL query to retrieve all movies from your API:

query {
movies {
id
title
}
}
tip

Learn more about your API's GraphQL interface in the +GraphQL API reference.

+ + + + \ No newline at end of file diff --git a/docs/0.42.0/guides/add-custom-functionality/extend-graphql/index.html b/docs/0.42.0/guides/add-custom-functionality/extend-graphql/index.html new file mode 100644 index 00000000000..8cbcf28cd9e --- /dev/null +++ b/docs/0.42.0/guides/add-custom-functionality/extend-graphql/index.html @@ -0,0 +1,18 @@ + + + + + +Extend GraphQL Schema | Platformatic Open Source Software + + + + + +
+
Version: 0.42.0

Extend GraphQL Schema

Sum Function

Copy and paste this code into ./sample-plugin.js file

'use strict'
module.exports = async(app, opts) => {
app.graphql.extendSchema(`
extend type Query {
add(x: Int, y: Int): Int
}
`)
app.graphql.defineResolvers({
Query: {
add: async (_, { x, y }) => x + y
}
})
}

This will add a new GraphQL query called add which will simply add the two inputs x and y provided.

You don't need to reload the server, since it will watch this file and hot-reload itself. +Let's query the server with the following body


query{
add(x: 1, y: 2)
}

You can use curl command to run this query

$ curl --location --request POST 'http://localhost:3042/graphql' \
--header 'Content-Type: application/json' \
--data-raw '{"query":"query{\n add(x: 1, y: 2)\n}"}'

You will get this output, with the sum.

{
"data": {
"add": 3
}
}

Extend Entities API

Let's implement a getPageByTitle query

'use strict'
module.exports = async(app, opts) => {
app.graphql.extendSchema(`
extend type Query {
getPageByTitle(title: String): Page
}
`)
app.graphql.defineResolvers({
Query: {
getPageByTitle: async(_, { title }) => {
const res = await app.platformatic.entities.page.find({
where: {
title: {
eq: title
}
}
})
if (res) {
return res[0]
}
return null
}
}
})
}

Page GraphQL type is already defined by Platformatic DB on start.

We are going to run this code against this GraphQL query

query{
getPageByTitle(title: "First Page"){
id
title
}
}

You can use curl command to run this query

$ curl --location --request POST 'http://localhost:3042/graphql' \
--header 'Content-Type: application/json' \
--data-raw '{"query":"query{\n getPageByTitle(title: \"First Page\"){\n id\n title\n }\n}"}'

You will get an output similar to this

{
"data": {
"getPageByTitle": {
"id": "1",
"title": "First Page"
}
}
}
+ + + + \ No newline at end of file diff --git a/docs/0.42.0/guides/add-custom-functionality/extend-rest/index.html b/docs/0.42.0/guides/add-custom-functionality/extend-rest/index.html new file mode 100644 index 00000000000..83e5084514c --- /dev/null +++ b/docs/0.42.0/guides/add-custom-functionality/extend-rest/index.html @@ -0,0 +1,17 @@ + + + + + +Extend REST API | Platformatic Open Source Software + + + + + +
+
Version: 0.42.0

Extend REST API

We will follow same examples implemented in GraphQL examples: a sum function and an API to get pages by title.

Sum Function

Copy and paste this code into ./sample-plugin.js file

'use strict'
module.exports = async(app, opts) => {
app.post('/sum', async(req, reply) => {
const { x, y } = req.body
return { sum: (x + y)}
})
}

You don't need to reload the server, since it will watch this file and hot-reload itself.

Let's make a POST /sum request to the server with the following body

{
"x": 1,
"y": 2
}

You can use curl command to run this query

$ curl --location --request POST 'http://localhost:3042/sum' \
--header 'Content-Type: application/json' \
--data-raw '{
"x": 1,
"y": 2
}'

You will get this output, with the sum.

{
"sum": 3
}

Extend Entities API

Let's implement a /page-by-title endpoint, using Entities API

'use strict'
module.exports = async(app, opts) => {
app.get('/page-by-title', async(req, reply) => {
const { title } = req.query
const res = await app.platformatic.entities.page.find({
where: {
title: {
eq: title
}
}
})
if (res) {
return res[0]
}
return null
})
}

We will make a GET /page-by-title?title=First%20Page request, and we expect a single page as output.

You can use curl command to run this query

$ curl --location --request GET 'http://localhost:3042/page-by-title?title=First Page'

You will get an output similar to this

{
"id": "1",
"title": "First Page",
"body": "This is the first sample page"
}
+ + + + \ No newline at end of file diff --git a/docs/0.42.0/guides/add-custom-functionality/introduction/index.html b/docs/0.42.0/guides/add-custom-functionality/introduction/index.html new file mode 100644 index 00000000000..c6c32140caa --- /dev/null +++ b/docs/0.42.0/guides/add-custom-functionality/introduction/index.html @@ -0,0 +1,17 @@ + + + + + +Add Custom Functionality | Platformatic Open Source Software + + + + + +
+
Version: 0.42.0

Add Custom Functionality

If you want to extend Platformatic DB features, it is possible to register a plugin, which will be in the form of a standard Fastify plugin.

The config file will specify where the plugin file is located as the example below:

{
...
"plugins": {
"paths": ["./plugin/index.js"]
}
}

The path is relative to the config file path.

Since it uses fastify-isolate under the hood, all other options of that package may be specified under the plugin property.

Once the config file is set up, you can write your plugin

module.exports = async function (app) {
app.log.info('plugin loaded')
// Extend GraphQL Schema with resolvers
app.graphql.extendSchema(`
extend type Query {
add(x: Int, y: Int): Int
}
`)
app.graphql.defineResolvers({
Query: {
add: async (_, { x, y }) => x + y
}
})

// Create a new route, see https://www.fastify.io/docs/latest/Reference/Routes/ for more info
app.post('/sum', (req, reply) => {
const {x, y} = req.body
return { result: x + y }
})

// access platformatic entities data
app.get('/all-entities', (req, reply) => {
const entities = Object.keys(app.platformatic.entities)
return { entities }
})
}

+ + + + \ No newline at end of file diff --git a/docs/0.42.0/guides/add-custom-functionality/prerequisites/index.html b/docs/0.42.0/guides/add-custom-functionality/prerequisites/index.html new file mode 100644 index 00000000000..3b3bed1d956 --- /dev/null +++ b/docs/0.42.0/guides/add-custom-functionality/prerequisites/index.html @@ -0,0 +1,17 @@ + + + + + +Prerequisites | Platformatic Open Source Software + + + + + +
+
Version: 0.42.0

Prerequisites

In the following examples we assume you already

  • cloned platformatic/platformatic repo from Github
  • ran pnpm install to install all dependencies
  • have Docker and docker-compose installed and running on your machine

Config File

Create a platformatic.db.json file in the root project, it will be loaded automatically by Platformatic (no need of -c, --config flag).

{
"server": {
"hostname": "127.0.0.1",
"port": 3042,
"logger": {
"level": "info"
}
},
"db": {
"connectionString": "postgres://postgres:postgres@127.0.0.1/postgres"
},
"migrations": {
"dir": "./migrations",
"table": "versions"
},
"plugins": {
"paths": ["plugin.js"]
}
}
  • Once Platformatic DB starts, its API will be available at http://127.0.0.1:3042
  • It will connect and read the schema from a PostgreSQL DB
  • Will read migrations from ./migrations directory
  • Will load custom functionality from ./plugin.js file.

Database and Migrations

Start the database using the sample docker-compose.yml file.

$ docker-compose up -d postgresql

For migrations create a ./migrations directory and a 001.do.sql file with following contents

CREATE TABLE pages (
id SERIAL PRIMARY KEY,
title VARCHAR(255) NOT NULL,
body TEXT NOT NULL
);
INSERT INTO pages (title, body) VALUES ('First Page', 'This is the first sample page');
INSERT INTO pages (title, body) VALUES ('Second Page', 'This is the second sample page');
INSERT INTO pages (title, body) VALUES ('Third Page', 'This is the third sample page');

Plugin

Copy and paste this boilerplate code into ./plugin.js file. We will fill this in the examples.

'use strict'

module.exports = async (app, opts) => {
// we will fill this later
}

Start the server

Run

$ platformatic db start

You will get an output similar to this

                           /////////////
///// /////
/// ///
/// ///
/// ///
&& /// /// &&
&&&&&& /// /// &&&&&&
&&&& /// /// &&&&
&&& /// /// &&&&&&&&&&&&
&&& /// /////// //// && &&&&&
&& /// /////////////// &&&
&&& /// /// &&&
&&& /// // &&
&&& /// &&
&&& /// &&&
&&&& /// &&&
&&&&&% /// &&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&
///
///
///
///
///
///

[11:19:46.562] INFO (65122): running 001.do.sql
[11:19:46.929] INFO (65122): server listening
url: "http://127.0.0.1:3042"

Now it is possible to create some examples, like extend GraphQL Schema, extend REST API

+ + + + \ No newline at end of file diff --git a/docs/0.42.0/guides/add-custom-functionality/raw-sql/index.html b/docs/0.42.0/guides/add-custom-functionality/raw-sql/index.html new file mode 100644 index 00000000000..1e69f5a3265 --- /dev/null +++ b/docs/0.42.0/guides/add-custom-functionality/raw-sql/index.html @@ -0,0 +1,17 @@ + + + + + +Raw SQL queries | Platformatic Open Source Software + + + + + +
+
Version: 0.42.0

Raw SQL queries

To run raw SQL queries using plugins, use the app.platformatic.db.query method and pass it a SQL query using the app.platformatic.sql method.

'use strict'
module.exports = async(app, opts) => {
app.graphql.extendSchema(`
type YearlySales {
year: Int
sales: Int
}

extend type Query {
yearlySales: [YearlySales]
}
`)
app.graphql.defineResolvers({
Query: {
yearlySales: async(_, { title }) => {
const { db, sql } = app.platformatic;
const res = await db.query(sql(`
SELECT
YEAR(created_at) AS year,
SUM(amount) AS sales
FROM
orders
GROUP BY
YEAR(created_at)
`))
return res
}
}
})
}
+ + + + \ No newline at end of file diff --git a/docs/0.42.0/guides/compiling-typescript-for-deployment/index.html b/docs/0.42.0/guides/compiling-typescript-for-deployment/index.html new file mode 100644 index 00000000000..0aec9c7cee5 --- /dev/null +++ b/docs/0.42.0/guides/compiling-typescript-for-deployment/index.html @@ -0,0 +1,25 @@ + + + + + +Compiling Typescript for Deployment | Platformatic Open Source Software + + + + + +
+
Version: 0.42.0

Compiling Typescript for Deployment

Platformatic Service provides automatic TypeScript compilation during the startup +of your Node.js server. While this provides an amazing developer experience, in production it adds additional +start time and it requires more resources. In this guide, we show how to compile your TypeScript +source files before shipping to a server.

Setup

The following is supported by all Platformatic applications, as they are all based on the same plugin system. +If you have generated your application using npx create-platformatic@latest, you will have a similar section in your config file:

{
...
"plugins": {
"paths": [{
"path": "plugins",
"encapsulate": false
}, "routes"],
"typescript": "{PLT_TYPESCRIPT}"
}
}

Note that the {PLT_TYPESCRIPT} will be automatically replaced with the PLT_TYPESCRIPT environment variable, that is configured in your +.env (and .env.sample) file:

PLT_TYPESCRIPT=true

Older Platformatic applications might not have the same layout, if so you can update your settings to match (after updating your dependencies).

Compiling for deployment

Compiling for deployment is then as easy as running plt service compile in that same folder. +Remember to set PLT_TYPESCRIPT=false in your environment variables in the deployed environments.

Usage with Runtime

If you are building a Runtime-based application, you will need +to compile every service independently or use the plt runtime compile command.

Avoid shipping TypeScript sources

If you want to avoid shipping the TypeScript sources you need to configure Platformatic with the location +where your files have been built by adding an outDir option:

{
...
"plugins": {
"paths": [{
"path": "plugins",
"encapsulate": false
}, "routes"],
"typescript": {
"enabled": "{PLT_TYPESCRIPT}",
"outDir": "dist"
}
}
}

This is not necessary if you include tsconfig.json together with the compiled code.

+ + + + \ No newline at end of file diff --git a/docs/0.42.0/guides/debug-platformatic-db/index.html b/docs/0.42.0/guides/debug-platformatic-db/index.html new file mode 100644 index 00000000000..c7ac06ac11e --- /dev/null +++ b/docs/0.42.0/guides/debug-platformatic-db/index.html @@ -0,0 +1,17 @@ + + + + + +Debug Platformatic DB | Platformatic Open Source Software + + + + + +
+
Version: 0.42.0

Debug Platformatic DB

Error: No tables found in the database

  • Verify your database connection string is correct in your Platformatic DB configuration
    • Make sure the database name is correct
  • Ensure that you have run the migration command npx platformatic db migrations apply before starting the server. See the Platformatic DB Migrations documentation for more information on working with migrations.

Logging SQL queries

You can see all the queries that are being run against your database in your terminal by setting the logger level to trace in your platformatic.db.json config file:

platformatic.db.json
{
"server": {
"logger": {
"level": "trace"
}
}
}
+ + + + \ No newline at end of file diff --git a/docs/0.42.0/guides/deploying-on-lambda/index.html b/docs/0.42.0/guides/deploying-on-lambda/index.html new file mode 100644 index 00000000000..0a5b4a20208 --- /dev/null +++ b/docs/0.42.0/guides/deploying-on-lambda/index.html @@ -0,0 +1,26 @@ + + + + + +Deploying on AWS Lambda | Platformatic Open Source Software + + + + + +
+
Version: 0.42.0

Deploying on AWS Lambda

It is possible to deploy Platformatic applications to AWS Lambda +by leveraging @fastify/aws-lambda.

Once you set up your Platformatic DB application, such as following +our tutorial, you can create a +server.mjs file as follows:

import awsLambdaFastify from '@fastify/aws-lambda'
import { buildServer } from '@platformatic/db'

const app = await buildServer('./platformatic.db.json')
// You can use the same approach with both Platformatic DB and
// and service
// const app = await buildServer('./platformatic.service.json')

// The following also work for Platformatic Service applications
// import { buildServer } from '@platformatic/service'
export const handler = awsLambdaFastify(app)

// Loads the Application, must be after the call to `awsLambdaFastify`
await app.ready()

This would be the entry point for your AWS Lambda function.

Avoiding cold start

Caching the DB schema

If you use Platformatic DB, you want to turn on the schemalock +configuration to cache the schema +information on disk.

Set the db.schemalock configuration to true, start the application, +and a schema.lock file should appear. Make sure to commit that file and +deploy your lambda.

Provisioned concurrency

Since AWS Lambda now enables the use of ECMAScript (ES) modules in Node.js 14 runtimes, +you could lower the cold start latency when used with Provisioned Concurrency +thanks to the top-level await functionality. (Excerpt taken from @fastify/aws-lambda)

+ + + + \ No newline at end of file diff --git a/docs/0.42.0/guides/deployment/advanced-fly-io-deployment/index.html b/docs/0.42.0/guides/deployment/advanced-fly-io-deployment/index.html new file mode 100644 index 00000000000..841f8adf289 --- /dev/null +++ b/docs/0.42.0/guides/deployment/advanced-fly-io-deployment/index.html @@ -0,0 +1,22 @@ + + + + + +Advanced Fly.io Deployment | Platformatic Open Source Software + + + + + +
+
Version: 0.42.0

Advanced Fly.io Deployment

Techniques used in this guide are based on the Deploy to Fly.io with SQLite +deployment guide.

Adding sqlite for debugging

With a combination of Docker and Fly.io, you can create an easy way to debug +your sqlite application without stopping your application or exporting the data. +At the end of this guide, you will be able to run fly ssh console -C db-cli to +be dropped into your remote database.

Start by creating a script for launching the database, calling it db-cli.sh:

#!/bin/sh
set -x
# DSN will be defined in the Dockerfile
sqlite3 $DSN

Create a new Dockerfile which will act as the build and deployment image:

FROM node:18-alpine

# Setup sqlite viewer
RUN apk add sqlite
ENV DSN "/app/.platformatic/data/app.db"
COPY db-cli.sh /usr/local/bin/db-cli
RUN chmod +x /usr/local/bin/db-cli

WORKDIR /app
COPY package.json package.json
COPY package-lock.json package-lock.json

RUN npm ci --omit=dev

COPY platformatic.db.json platformatic.db.json

COPY migrations migrations
# Uncomment if your application is running a plugin
# COPY plugin.js plugin.js

EXPOSE 8080

CMD ["npm", "start"]

Add a start script to your package.json:

{
"scripts": {
"start": "platformatic db"
}
}

With Fly, it becomes straightforward to connect directly to the database by +running the following command from your local machine:

fly ssh console -C db-cli
+ + + + \ No newline at end of file diff --git a/docs/0.42.0/guides/deployment/deploy-to-fly-io-with-sqlite/index.html b/docs/0.42.0/guides/deployment/deploy-to-fly-io-with-sqlite/index.html new file mode 100644 index 00000000000..e3ed54c054d --- /dev/null +++ b/docs/0.42.0/guides/deployment/deploy-to-fly-io-with-sqlite/index.html @@ -0,0 +1,33 @@ + + + + + +Deploy to Fly.io with SQLite | Platformatic Open Source Software + + + + + +
+
Version: 0.42.0

Deploy to Fly.io with SQLite

note

To follow this how-to guide, you'll first need to install the Fly CLI and create +an account by following this official guide. +You will also need an existing Platformatic DB project, please check out our +getting started guide if needed.

Navigate to your Platformatic DB project in the terminal on your local machine. +Run fly launch and follow the prompts. When it asks if you want to deploy +now, say "no" as there are a few things that you'll need to configure first.

You can also create the fly application with one line. This will create your +application in London (lhr):

fly launch --no-deploy --generate-name --region lhr --org personal --path .

The fly CLI should have created a fly.toml file in your project +directory.

Explicit builder

The fly.toml file may be missing an explicit builder setting. To have +consistent builds, it is best to add a build section:

[build]
builder = "heroku/buildpacks:20"

Database storage

Create a volume for database storage, naming it data:

fly volumes create data

This will create storage in the same region as the application. The volume +defaults to 3GB size, use -s to change the size. For example, -s 10 is 10GB.

Add a mounts section in fly.toml:

[mounts]
source = "data"
destination = "/app/.platformatic/data"

Create a directory in your project where your SQLite database will be created:

mkdir -p .platformatic/data

touch .platformatic/data/.gitkeep

The .gitkeep file ensures that this directory will always be created when +your application is deployed.

You should also ensure that your SQLite database is ignored by Git. This helps +avoid inconsistencies when your application is deployed:

echo "*.db" >> .gitignore

The command above assumes that your SQLite database file ends with the extension +.db — if the extension is different then you must change the command to match.

Change the connection string to an environment variable and make sure that +migrations are autoApplying (for platformatic@^0.4.0) in platformatic.db.json:

{
"db": {
"connectionString": "{DATABASE_URL}"
},
"migrations": {
"dir": "./migrations",
"autoApply": true
}
}

Configure server

Make sure that your platformatic.db.json uses environment variables +for the server section:

{
"server": {
"logger": {
"level": "{PLT_SERVER_LOGGER_LEVEL}"
},
"hostname": "{PLT_SERVER_HOSTNAME}",
"port": "{PORT}"
}
}

Configure environment

Start with your local environment, create a .env file and put the following:

PORT=3042
PLT_SERVER_HOSTNAME=127.0.0.1
PLT_SERVER_LOGGER_LEVEL=debug
DATABASE_URL=sqlite://.platformatic/data/movie-quotes.db

Avoid accidental leaks by ignoring your .env file:

echo ".env" >> .gitignore

This same configuration needs to be added to fly.toml:

[env]
PORT = 8080
PLT_SERVER_HOSTNAME = "0.0.0.0"
PLT_SERVER_LOGGER_LEVEL = "info"
DATABASE_URL = "sqlite:///app/.platformatic/data/movie-quotes.db"

Deploy application

A valid package.json will be needed so if you do not have one, generate one +by running npm init.

In your package.json, make sure there is a start script to run your +application:

{
"scripts": {
"start": "platformatic db"
}
}

Before deploying, make sure a .dockerignore file is created:

cp .gitignore .dockerignore

Finally, deploy the application to Fly by running:

fly deploy
+ + + + \ No newline at end of file diff --git a/docs/0.42.0/guides/deployment/index.html b/docs/0.42.0/guides/deployment/index.html new file mode 100644 index 00000000000..33a0673de45 --- /dev/null +++ b/docs/0.42.0/guides/deployment/index.html @@ -0,0 +1,46 @@ + + + + + +Deployment | Platformatic Open Source Software + + + + + +
+
Version: 0.42.0

Deployment

Applications built with Platformatic DB can be deployed to a hosting service +in the same way as any other Node.js application. This guide covers a few +things that will help smooth the path from development to production.

Running a Platformatic DB application

Make the Platformatic CLI available

To run a Platformatic DB application, the Platformatic CLI must be available +in the production environment. The most straightforward way of achieving this +is to install it as a project dependency. +This means that when npm install (or npm ci) is run as part of your +build/deployment process, the Platformatic CLI will be installed.

Define an npm run script

A number of hosting services will automatically detect if your project's +package.json has a start npm run script. They will then execute the command +npm start to run your application in production.

You can add platformatic db start as the command for your project's start +npm run script, for example:

{
...
"scripts": {
"start": "platformatic db start",
},
}

Server configuration

info

See the Configuration reference for all +configuration settings.

Configuration with environment variables

We recommend that you use environment variable placeholders +in your Platformatic DB configuration. This will allow you to configure +different settings in your development and production environments.

In development you can set the environment variables via a .env file +that will be automatically loaded by Platformatic DB. For example:

PORT=3042
PLT_SERVER_HOSTNAME=127.0.0.1

In production your hosting provider will typically provide their own mechanism +for setting environment variables.

Configure the server port

Configure the port that the server will listen on by setting an environment +variable placeholder in your Platformatic DB configuration file:

platformatic.db.json
{
"server": {
...
"port": "{PORT}"
},
...
}

Listen on all network interfaces

Most hosting providers require that you configure your server to bind to all +available network interfaces. To do this you must set the server hostname to +0.0.0.0.

This can be handled with an environment variable placeholder in your Platformatic +DB configuration file:

platformatic.db.json
{
"server": {
...
"hostname": "{PLT_SERVER_HOSTNAME}",
},
...
}

The environment variable PLT_SERVER_HOSTNAME should then be set to 0.0.0.0 +in your hosting environment.

Security considerations

We recommend disabling the GraphiQL web UI in production. It can be disabled +with the following configuration:

platformatic.db.json
{
"db": {
...
"graphql": {
"graphiql": false
}
},
...
}

If you want to use this feature in development, replace the configuration +values with environment variable placeholders +so you can set it to true in development and false in production.

Removing the welcome page

If you want to remove the welcome page, you should register an index route.

module.exports = async function (app) {
// removing the welcome page
app.get('/', (req, reply) => {
return { hello: 'world' }
})
}

Databases

Applying migrations

If you're running a single instance of your application in production, it's best to allow Platformatic DB to automatically run migrations when the server starts. This reduces the chance of a currently running instance using a database structure it doesn't understand while the new version is still being deployed.

SQLite

When using an SQLite database, you can ensure you don’t commit it to your Git +repository by adding the SQLite database filename to your .gitignore file. +The SQLite database file will be automatically generated by Platformatic DB +when your application migrations are run in production.

+ + + + \ No newline at end of file diff --git a/docs/0.42.0/guides/dockerize-platformatic-app/index.html b/docs/0.42.0/guides/dockerize-platformatic-app/index.html new file mode 100644 index 00000000000..054221960db --- /dev/null +++ b/docs/0.42.0/guides/dockerize-platformatic-app/index.html @@ -0,0 +1,20 @@ + + + + + +Dockerize a Platformatic App | Platformatic Open Source Software + + + + + +
+
Version: 0.42.0

Dockerize a Platformatic App

This guide explains how to create a new Platformatic DB app, which connects to a PostgreSQL database.

We will then create a docker-compose.yml file that will run both services in separate containers

Generate a Platformatic DB App

Run this command in your terminal to start the Platformatic creator wizard:

npm create platformatic@latest

This interactive command-line tool will ask you some questions about how you'd +like to set up your new Platformatic project. For this guide, select these options:

- Which kind of project do you want to create?  => DB
- Where would you like to create your project? => quick-start
- Do you want to create default migrations? => Yes
- Do you want to create a plugin? => Yes
- Do you want to use TypeScript? => No
- Do you want to install dependencies? => Yes (this can take a while)
- Do you want to apply the migrations? => Yes
- Do you want to generate types? => Yes
- Do you want to create the github action to deploy this application to Platformatic Cloud dynamic workspace? => No
- Do you want to create the github action to deploy this application to Platformatic Cloud static workspace? => No

Once the wizard is complete, you'll have a Platformatic app project in the +folder quick-start, with example migration files and a plugin script.

info

Make sure you run the npm/yarn/pnpm install command manually if you don't ask the wizard to do it for you.

Create Docker image for the Platformatic DB App

In this step you are going to create some files into the root project directory

  • .dockerignore - This file tells Docker to ignore some files when copying the directory into the image filesystem
node_modules
.env*
  • start.sh - This is our entrypoint. We will run migrations then start platformatic
#!/bin/sh

echo "Running migrations..." && \
npx platformatic db migrations apply && \
echo "Starting Platformatic App..." && \
npm start
info

Make sure you make this file executable with the command chmod +x start.sh

  • Dockerfile - This is the file Docker uses to create the image
FROM node:18-alpine
WORKDIR /usr/src/app
COPY . .
RUN npm install
COPY . .
EXPOSE 3042
CMD [ "./start.sh" ]

At this point you can build your Docker image with the command

$ docker build -t platformatic-app .

Create Docker Compose config file

docker-compose.yml is the configuration file for docker-compose which will spin up containers for both PostgreSQL and our Platformatic App

version: "3.3"
services:
postgresql:
ports:
- "5433:5432"
image: "postgres:15-alpine"
environment:
- POSTGRES_PASSWORD=postgres
platformatic:
ports:
- "3042:3042"
image: 'platformatic-app:latest'
depends_on:
- postgresql
links:
- postgresql
environment:
PLT_SERVER_HOSTNAME: ${PLT_SERVER_HOSTNAME}
PORT: ${PORT}
PLT_SERVER_LOGGER_LEVEL: ${PLT_SERVER_LOGGER_LEVEL}
DATABASE_URL: postgres://postgres:postgres@postgresql:5432/postgres

A couple of things to notice:

  • The Platformatic app is started only once the database container is up and running (depends_on).
  • The Platformatic app is linked with the postgresql service, meaning that inside its container, ping postgresql will be resolved with the internal IP of the database container.
  • The environment is taken directly from the .env file created by the wizard

You can now run your containers with

$ docker-compose up # (-d if you want to send them in the background)

Everything should start smoothly, and you can access your app by pointing your browser to http://0.0.0.0:3042

To stop the app you can either press CTRL-C if you are running them in the foreground, or, if you used the -d flag, run

$ docker-compose down
+ + + + \ No newline at end of file diff --git a/docs/0.42.0/guides/generate-frontend-code-to-consume-platformatic-rest-api/index.html b/docs/0.42.0/guides/generate-frontend-code-to-consume-platformatic-rest-api/index.html new file mode 100644 index 00000000000..a7e7b2f7418 --- /dev/null +++ b/docs/0.42.0/guides/generate-frontend-code-to-consume-platformatic-rest-api/index.html @@ -0,0 +1,32 @@ + + + + + +Generate Front-end Code to Consume Platformatic REST API | Platformatic Open Source Software + + + + + +
+
Version: 0.42.0

Generate Front-end Code to Consume Platformatic REST API

By default, a Platformatic app exposes REST APIs that provide CRUD (Create, Read, Update, Delete) functionality for each entity (see the Introduction to the REST API documentation for more information on the REST API).

The Platformatic CLI allows you to auto-generate the front-end code to import in your front-end application to consume the Platformatic REST API.

This guide

  • Explains how to create a new Platformatic app.
  • Explains how to configure the new Platformatic app.
  • Explains how to create a new React or Vue.js front-end application.
  • Explains how to generate the front-end TypeScript code to consume the Platformatic app REST API.
  • Provides some React and Vue.js components (either of them written in TypeScript) that read, create, and update an entity.
  • Explains how to import the new component in your front-end application.

Create a new Platformatic app

Run this command in your terminal to start the Platformatic creator wizard:

npm create platformatic@latest

This interactive command-line tool will ask you some questions about how you'd +like to set up your new Platformatic project. For this guide, select these options:

- Which kind of project do you want to create?  => DB
- Where would you like to create your project? => quick-start
- Do you want to create default migrations? => Yes
- Do you want to create a plugin? => Yes
- Do you want to use TypeScript? => No
- Do you want to install dependencies? => Yes (this can take a while)
- Do you want to apply the migrations? => Yes
- Do you want to generate types? => Yes
- Do you want to create the github action to deploy this application to Platformatic Cloud dynamic workspace? => No
- Do you want to create the github action to deploy this application to Platformatic Cloud static workspace? => No

Once the wizard is complete, you'll have a Platformatic app project in the +folder quick-start, with example migration files and a plugin script.

info

Make sure you run the npm/yarn/pnpm install command manually if you don't ask the wizard to do it for you.

Configure the new Platformatic app

Refer to the steps above to create a new Platformatic app. Every Platformatic app uses the "Movie" demo entity and includes the corresponding table, migrations, and REST API to create, read, update, and delete movies.

Once the new Platformatic app is ready:

  • Set up CORS in platformatic.db.json
{
"$schema": "https://platformatic.dev/schemas/v0.24.0/db",
"server": {
"hostname": "{PLT_SERVER_HOSTNAME}",
"port": "{PORT}",
"logger": {
"level": "{PLT_SERVER_LOGGER_LEVEL}"
},
+ "cors": {
+ "origin": {
+ "regexp": "/*/"
+ }
+ }
},
...
}

You can find more details about the cors configuration here.

  • launch Platformatic through npm start. +Then, the Platformatic app should be available at the http://127.0.0.1:3042/ URL.

Create a new Front-end Application

Refer to the Scaffolding Your First Vite Project +documentation to create a new front-end application, and call it "rest-api-frontend".

info

Please note Vite is suggested only for practical reasons, but the bundler of choice does not make any difference.

If you are using npm 7+ you should run

npm create vite@latest rest-api-frontend -- --template react-ts

and then follow the Vite's instructions

Scaffolding project in /Users/noriste/Sites/temp/platformatic/rest-api-frontend...

Done. Now run:

cd rest-api-frontend
npm install
npm run dev

Once done, the front-end application is available at http://localhost:5174/.

Generate the front-end code to consume the Platformatic app REST API

Now that both the Platformatic app and the front-end app are running, go to the front-end codebase and run the Platformatic CLI

cd rest-api-frontend/src
npx platformatic frontend http://127.0.0.1:3042 ts

Refer to the Platformatic CLI frontend command +documentation to know about the available options.

The Platformatic CLI generates

  • api.d.ts: A TypeScript module that includes all the OpenAPI-related types. +Here is part of the generated code
interface GetMoviesRequest {
'limit'?: number;
'offset'?: number;
// ... etc.
}

interface GetMoviesResponseOK {
'id'?: number;
'title': string;
}


// ... etc.

export interface Api {
setBaseUrl(baseUrl: string): void;
getMovies(req: GetMoviesRequest): Promise<Array<GetMoviesResponseOK>>;
createMovie(req: CreateMovieRequest): Promise<CreateMovieResponseOK>;
// ... etc.
}
  • api.ts: A TypeScript module that includes a typed function for every single OpenAPI endpoint. +Here is part of the generated code
import type { Api } from './api-types'

let baseUrl = ''
export function setBaseUrl(newUrl: string) { baseUrl = newUrl };

export const createMovie: Api['createMovie'] = async (request) => {
const response = await fetch(`${baseUrl}/movies/`, {
method:'post',
body: JSON.stringify(request),
headers: {
'Content-Type': 'application/json'
}
})

if (!response.ok) {
throw new Error(await response.text())
}

return await response.json()
}

// etc.

You can add a --name option to the command line to provide a custom name for the generated files.

cd rest-api-frontend/src
npx platformatic frontend --name foobar http://127.0.0.1:3042 ts

will generate foobar.ts and foobar-types.d.ts

React and Vue.js components that read, create, and update an entity

You can copy/paste the following React or Vue.js components that import the code +the Platformatic CLI generated.

Create a new file src/PlatformaticPlayground.tsx and copy/paste the following code.

import { useEffect, useState } from 'react'

// getMovies, createMovie, and updateMovie are all functions automatically generated by Platformatic
// in the `api.ts` module.
import { getMovies, createMovie, updateMovie, setBaseUrl } from './api'

setBaseUrl('http://127.0.0.1:3042') // configure this according to your needs

export function PlatformaticPlayground() {
const [movies, setMovies] = useState<Awaited<ReturnType<typeof getMovies>>>([])
const [newMovie, setNewMovie] = useState<Awaited<ReturnType<typeof createMovie>>>()

async function onCreateMovie() {
const newMovie = await createMovie({ title: 'Harry Potter' })
setNewMovie(newMovie)
}

async function onUpdateMovie() {
if (!newMovie || !newMovie.id) return

const updatedMovie = await updateMovie({ id: newMovie.id, title: 'The Lord of the Rings' })
setNewMovie(updatedMovie)
}

useEffect(() => {
async function fetchMovies() {
const movies = await getMovies({})
setMovies(movies)
}

fetchMovies()
}, [])

return (
<>
<h2>Movies</h2>

{movies.length === 0 ? (
<div>No movies yet</div>
) : (
<ul>
{movies.map((movie) => (
<li key={movie.id}>{movie.title}</li>
))}
</ul>
)}

<button onClick={onCreateMovie}>Create movie</button>
<button onClick={onUpdateMovie}>Update movie</button>

{newMovie && <div>Title: {newMovie.title}</div>}
</>
)
}

Import the new component in your front-end application

You need to import and render the new component in the front-end application.

Change the App.tsx as follows

import { useState } from 'react'
import reactLogo from './assets/react.svg'
import viteLogo from '/vite.svg'
import './App.css'

+import { PlatformaticPlayground } from './PlatformaticPlayground'

function App() {
const [count, setCount] = useState(0)

return (
<>
+ <PlatformaticPlayground />
<div>
<a href="https://vitejs.dev" target="_blank">
<img src={viteLogo} className="logo" alt="Vite logo" />
</a>
<a href="https://react.dev" target="_blank">
<img src={reactLogo} className="logo react" alt="React logo" />
</a>
</div>
<h1>Vite + React</h1>
<div className="card">
<button onClick={() => setCount((count) => count + 1)}>count is {count}</button>
<p>
Edit <code>src/App.tsx</code> and save to test HMR
</p>
</div>
<p className="read-the-docs">Click on the Vite and React logos to learn more</p>
</>
)
}

export default App

Have fun

At the top of the front-end application, the new component requests the movies from the Platformatic app and lists them.

Platformatic frontend guide: listing the movies

Click on "Create movie" to create a new movie called "Harry Potter".

Platformatic frontend guide: creating a movie

Click on "Update movie" to rename "Harry Potter" into "Lord of the Rings".

Platformatic frontend guide: editing a movie

Reload the front-end application to see the new "Lord of the Rings" movie listed.

Platformatic frontend guide: listing the movies.

+ + + + \ No newline at end of file diff --git a/docs/0.42.0/guides/jwt-auth0/index.html b/docs/0.42.0/guides/jwt-auth0/index.html new file mode 100644 index 00000000000..a50792af5b5 --- /dev/null +++ b/docs/0.42.0/guides/jwt-auth0/index.html @@ -0,0 +1,21 @@ + + + + + +Configure JWT with Auth0 | Platformatic Open Source Software + + + + + +
+
Version: 0.42.0

Configure JWT with Auth0

Auth0 is a powerful authentication and authorization service provider that can be integrated with Platformatic DB through JSON Web Tokens (JWT). When a user is authenticated, Auth0 creates a JWT token with all necessary security information and custom claims (like the X-PLATFORMATIC-ROLE, see User Metadata) and signs the token.

Platformatic DB needs the correct public key to verify the JWT signature. The fastest way is to leverage JWKS, since Auth0 exposes a JWKS endpoint for each tenant. Given an Auth0 tenant's issuer URL, the (public) keys are accessible at ${issuer}/.well-known/jwks.json. For instance, if the issuer is https://dev-xxx.us.auth0.com/, the public keys are accessible at https://dev-xxx.us.auth0.com/.well-known/jwks.json

To configure Platformatic DB authorization to use JWKS with Auth0, set:


...
"authorization": {
"jwt": {
"jwks": {
"allowedDomains": [
"https://dev-xxx.us.auth0.com/"
]
}
},
}
...

danger

Note that specifying allowedDomains is critical to correctly restrict the JWTs, which MUST be issued from one of the allowed domains.

Custom Claim Namespace

In Auth0 there are restrictions about the custom claim that can be set on access tokens. One of these is that the custom claims MUST be namespaced, i.e. we cannot have X-PLATFORMATIC-ROLE but we must specify a namespace, e.g.: https://platformatic.dev/X-PLATFORMATIC-ROLE

To map these claims to user metadata removing the namespace, we can specify the namespace in the JWT options:

...
"authorization": {
"jwt": {
"namespace": "https://platformatic.dev/",
"jwks": {
"allowedDomains": [
"https://dev-xxx.us.auth0.com/"
]
}
},
}
...

With this configuration, the https://platformatic.dev/X-PLATFORMATIC-ROLE claim is mapped to X-PLATFORMATIC-ROLE user metadata.

+ + + + \ No newline at end of file diff --git a/docs/0.42.0/guides/migrating-express-app-to-platformatic-service/index.html b/docs/0.42.0/guides/migrating-express-app-to-platformatic-service/index.html new file mode 100644 index 00000000000..1f0f3236003 --- /dev/null +++ b/docs/0.42.0/guides/migrating-express-app-to-platformatic-service/index.html @@ -0,0 +1,17 @@ + + + + + +Migrating an Express app to Platformatic Service | Platformatic Open Source Software + + + + + +
+
Version: 0.42.0

Migrating an Express app to Platformatic Service

Introduction

Our open-source tools are built on top of the modern and flexible Fastify web framework. It provides logging, request validation and a powerful plugin system out-of-the-box, as well as incredible performance.

If you have an existing Express application, migrating it to Fastify could potentially be time consuming, and might not be something that you're able to prioritise right now. You can however still take advantage of Fastify and our open-source tools. In this guide you'll learn how to use the @fastify/express plugin to help you rapidly migrate your existing Express application to use Platformatic Service.

This guide assumes that you have some experience building applications with the Express framework.

Example Express application

For the purpose of this guide, we have a basic example Express application. Although this app has a specific structure, the migration steps covered in this guide can generally be applied to any Express application.

The code for the example Express and migrated Platformatic Service applications is available on GitHub.

Here's the structure of the example Express application:

├── app.js
├── package.json
├── routes
│ └── users.js
└── server.js

It has the following dependencies:

// package.json

"dependencies": {
"express": "^4.18.2"
}

The application has routes in routes/users.js:

// routes/users.js

import express from 'express'

const router = express.Router()

router.use(express.json())

router.post('/', function createUser(request, response, next) {
const newUser = request.body

if (!newUser) {
return next(new Error('Error creating user'))
}

response.status(201).json(newUser)
})

router.get('/:user_id', function getUser(request, response, next) {
const user = {
id: Number(request.params.user_id),
first_name: 'Bobo',
last_name: 'Oso'
}

response.json(user)
})

export const usersRoutes = router

In app.js, we have a factory function that creates a new Express server instance and mounts the routes:

// app.js

import express from 'express'

import { usersRoutes } from './routes/users.js'

export default function buildApp() {
const app = express()

app.use('/users', usersRoutes)

return app
}

And in server.js we're calling the factory function and starting the server listening for HTTP requests:

// server.js

import buildApp from './app.js'

const express = buildApp()

express.listen(3042, () => {
console.log('Example app listening at http://localhost:3042')
})

The routes in your Express application should be mounted on an Express router (or multiple routers if needed). This will allow them to be mounted using @fastify/express when you migrate your app to Platformatic Service.

Creating a new Platformatic Service app

To migrate your Express app to Platformatic Service, create a new Platformatic Service app with:

npm create platformatic@latest

Be sure to select Service as the project type. You should also say yes when you're asked if you want to create the GitHub Actions workflows for deploying your application to Platformatic Cloud.

Once the project has been created, you can delete the example plugins and routes directories.

Using ES modules

If you're using ES modules in the Express application code that you'll be migrating, ensure that there's a type field in package.json set to module:

npm pkg set type=module

Migrate the Express routes

Copy over the routes directory from your Express app.

Install @fastify/express

Install the @fastify/express Fastify plugin to add full Express compatibility to your Platformatic Service app:

npm install @fastify/express

Mounting the Express routes

Create a root Fastify plugin that registers the @fastify/express plugin and loads your Express routes:

// root-plugin.js

import { usersRoutes } from './routes/users.js'

/** @param {import('fastify').FastifyInstance} app */
export default async function (app) {
await app.register(import('@fastify/express'))

app.use('/users', usersRoutes)
}

Configuring the Platformatic Service app

Edit your app's platformatic.service.json to load your root plugin:

// platformatic.service.json

{
...,
"plugins": {
"paths": [{
"path": "./root-plugin.js",
"encapsulate": false
}],
"hotReload": false
},
"watch": false
}

These settings are important when using @fastify/express in a Platformatic Service app:

  • encapsulate — You'll need to disable encapsulation for any Fastify plugin which mounts Express routes. This is due to the way that @fastify/express works.
  • hotReload and watch — You'll need to disable hot reloading and watching for your app, as they don't currently work when using @fastify/express. This is a known issue that we're working to fix.

Wrapping up

You can learn more about building Node.js apps with Platformatic service in the Platformatic Service documentation.

Once you've migrated your Express app to use Platformatic Service with @fastify/express, you might then want to consider fully migrating your Express routes and application code to Fastify. This tutorial shows how you can approach that migration process: How to migrate your app from Express to Fastify (video).

+ + + + \ No newline at end of file diff --git a/docs/0.42.0/guides/migrating-fastify-app-to-platformatic-service/index.html b/docs/0.42.0/guides/migrating-fastify-app-to-platformatic-service/index.html new file mode 100644 index 00000000000..34f997b2ea6 --- /dev/null +++ b/docs/0.42.0/guides/migrating-fastify-app-to-platformatic-service/index.html @@ -0,0 +1,17 @@ + + + + + +Migrating a Fastify app to Platformatic Service | Platformatic Open Source Software + + + + + +
+
Version: 0.42.0

Migrating a Fastify app to Platformatic Service

Introduction

Building production ready Node.js application with Fastify can require a certain amount of boilerplate code. This is a side effect of some of Fastify's technical principles:

  • If it can be a plugin, it should be a plugin — Plugins help with the separation of concerns, they improve testability, and also provide a way to logically organise and structure your applications.
  • Developer choice = developer freedom — Fastify only applies a few strong opinions, in key areas such as logging and validation. The framework features have been designed to give you the freedom to build your applications however you want.
  • You know your needs best — Fastify doesn't make assumptions about what plugins you'll need in your application. As the Fastify plugin ecosystem and the community has grown, a clear group of popular plugin choices has emerged.

Platformatic Service is the natural evolution of the build-it-from-scratch Fastify development experience. It provides a solid foundation for building Node.js applications on top of Fastify, with best practices baked in.

See the Building apps with Platformatic Service section of this guide to learn more about the built-in features.

The good news is that the path to migrate a Fastify application to use Platformatic Service is fairly straightforward. This guide covers some of the things you'll need to know when migrating an application, as well as tips on different migration approaches.

This guide assumes that you have some experience building applications with the Fastify framework. If you'd like to learn more about building web applications with Fastify, we recommend taking a look at:

Example Fastify application

For the purpose of this guide, we have a basic example Fastify application. Although this app has a specific structure, the migration steps covered in this guide can generally be applied to any Fastify application.

The code for the example Fastify and migrated Platformatic Service applications is available on GitHub.

Here's the structure of the example Fastify application:

├── app.js
├── package.json
├── plugins
│   └── data-source.js
├── routes
│   ├── movies.js
│   └── quotes.js
├── server.js
└── test
└── routes.test.js

It has the following dependencies:

// package.json

"dependencies": {
"fastify": "^4.17.0",
"fastify-plugin": "^4.5.0"
}

The application has a plugin that decorates the Fastify server instance, as well as two Fastify plugins which define API routes. Here's the code for them:

// plugins/data-source.js

import fastifyPlugin from 'fastify-plugin'

/** @param {import('fastify').FastifyInstance} app */
async function dataSource (app) {
app.decorate('movies', [
'Jaws',
'Star Wars',
'The Wizard of Oz'
])

app.decorate('quotes', [
'You\'re gonna need a bigger boat.',
'May the Force be with you.',
'Toto, I\'ve got a feeling we\'re not in Kansas anymore.'
])
}

export default fastifyPlugin(dataSource)

fastify-plugin is used to prevent Fastify from creating a new encapsulation context for the plugin. This makes the decorators that are registered in the dataSource plugin available in the route plugins. You can learn about this fundamental Fastify concept in the Fastify Encapsulation documentation.

// routes/movies.js

/** @param {import('fastify').FastifyInstance} app */
export default async function movieRoutes (app) {
app.get('/', async (request, reply) => {
return app.movies
})
}
// routes/quotes.js

/** @param {import('fastify').FastifyInstance} app */
export default async function quotesRoutes (app) {
app.get('/', async (request, reply) => {
return app.quotes
})
}

The route plugins aren't registering anything that needs to be available in other plugins. They have their own encapsulation context and don't need to be wrapped with fastify-plugin.

There's also a buildApp() factory function in app.js, which takes care of creating a new Fastify server instance and registering the plugins and routes:

// app.js

import fastify from 'fastify'

export async function buildApp (options = {}) {
const app = fastify(options)

app.register(import('./plugins/data-source.js'))

app.register(import('./routes/movies.js'), { prefix: '/movies' })
app.register(import('./routes/quotes.js'), { prefix: '/quotes' })

return app
}

And server.js, which calls the buildApp function to create a new Fastify server, and then starts it listening:

// server.js

import { buildApp } from './app.js'

const port = process.env.PORT || 3042
const host = process.env.HOST || '127.0.0.1'

const options = {
logger: {
level: 'info'
}
}

const app = await buildApp(options)

await app.listen({ port, host })

As well as a couple of tests for the API routes:

// tests/routes.test.js

import { test } from 'node:test'
import assert from 'node:assert/strict'

import { buildApp } from '../app.js'

test('Basic API', async (t) => {
const app = await buildApp()

t.after(async () => {
await app.close()
})

await t.test('GET request to /movies route', async () => {
const response = await app.inject({
method: 'GET',
url: '/movies'
})

assert.equal(response.statusCode, 200)
assert.deepEqual(response.json(), [
'Jaws',
'Star Wars',
'The Wizard of Oz'
])
})

await t.test('GET request to /quotes route', async () => {
const response = await app.inject({
method: 'GET',
url: '/quotes'
})

assert.equal(response.statusCode, 200)
assert.deepEqual(response.json(), [
'You\'re gonna need a bigger boat.',
'May the Force be with you.',
'Toto, I\'ve got a feeling we\'re not in Kansas anymore.'
])
})
})

These tests are using the built in Node.js test runner, node:test. They can be run with the command: node --test --test-reporter=spec test/*.test.js.

The @param lines in this application code are JSDoc blocks that import the FastifyInstance type. This allows many code editors to provide auto-suggest, type hinting and type checking for your code.

Creating a new Platformatic Service app

To migrate your Fastify app to Platformatic Service, create a new Platformatic Service app with:

npm create platformatic@latest

Be sure to select Service as the project type. Once the project has been created, you can delete the example plugins and routes directories.

App configuration

The configuration for the Platformatic Service app is stored in platformatic.service.json.

The generated configuration is set up to load plugins from the plugins and routes directories:

// platformatic.service.json

"plugins": {
"paths": [
"./plugins",
"./routes"
]
}

The value for any configuration setting in platformatic.service.json can be replaced with an environment variable by adding a placeholder, for example {PLT_SERVER_LOGGER_LEVEL}. In development, environment variables are automatically loaded by your Platformatic Service app from a .env file in the root directory of your app. In production, you'll typically set these environment variables using a feature provided by your hosting provider.

See the Platformatic Service documentation for Environment variable placeholders to learn more about how this works.

Using ES modules

If you're using ES modules in the Fastify application code that you'll be migrating, ensure that there's a type field in package.json set to module:

npm pkg set type=module

Refactoring Fastify server factories

If your Fastify application has a script with a factory function to create and build up a Fastify server instance, you can refactor it into a Fastify plugin and use it in your Platformatic Service app.

Here are a few things to consider while refactoring it:

  • Move the options you're passing to Fastify when creating a new server instance to the server block in platformatic.service.json. These options will be passed through directly by Platformatic Service when it creates a Fastify server instance.
  • You can create a root plugin to be loaded by your Platformatic Service app, for example: export default async function rootPlugin (app, options) { ... }
  • When you copy the code from your factory function into your root plugin, remove the code which is creating the Fastify server instance.
  • You can configure your Platformatic Service to load the root plugin, for example:
    "plugins": {
    "paths": ["./root-plugin.js"]
    }
  • If you need to pass options to your root plugin, you can do it like this:
    "plugins": {
    "paths": [
    {
    "path": "./root-plugin.js",
    "options": {
    "someOption": true
    }
    }
    ]
    }

Migrating plugins

Copy over the plugins directory from your Fastify app. You shouldn't need to make any modifications for them to work with Platformatic Service.

Disabling plugin encapsulation

Platformatic Service provides a configuration setting which enables you to disable encapsulation for a plugin, or all the plugins within a directory. This will make any decorators or hooks that you set in those plugins available to all other plugins. This removes the need for you to wrap your plugins with fastify-plugin.

To disable encapsulation for all plugins within the plugins directory, you would set your plugins configuration like this in platformatic.service.json:

// platformatic.service.json

"plugins": {
"paths": [
{
"path": "./plugins",
"encapsulate": false
},
"./routes"
]
}

You can learn more about plugin encapsulation in the Fastify Plugins Guide.

Migrating routes

Copy over the routes directory from your Fastify app.

Explicit route paths

If you're registering routes in your Fastify application with full paths, for example /movies, you won't need to make any changes to your route plugins.

Route prefixing with file-system based routing

If you're using the prefix option when registering route plugins in your Fastify application, for example:

app.register(import('./routes/movies.js'), { prefix: '/movies' })

You can achieve the same result with Platformatic Service by using file-system based routing. With the following directory and file structure:

routes/
├── movies
│   └── index.js
└── quotes
└── index.js

Assuming that both of the route files register a / route, these are the route paths that will be registered in your Platformatic Service app:

/movies
/quotes

With the example Fastify application, this would mean copying the route files over to these places in the Platformatic Service app:

routes/movies.js -> routes/movies/index.js
routes/quotes.js -> routes/quotes/index.js

How does this work? Plugins are loaded with the @fastify/autoload Fastify plugin. The dirNameRoutePrefix plugin option for @fastify/autoload is enabled by default. This means that "routes will be automatically prefixed with the subdirectory name in an autoloaded directory".

If you'd prefer not to use file-system based routing with Platformatic Service, you can add prefixes to the paths for the routes themselves (see Explicit route paths).

Adapting existing usage of @fastify/autoload

If you're using @fastify/autoload in your Fastify application, there are a couple of approaches you can take when migrating the app to Platformatic Service:

  • Configure plugins in your Platformatic Service app's platformatic.service.json. It will then take care of loading your routes and plugins for you with @fastify/autoload (configuration documentation).
  • You can continue to use @fastify/autoload directly with a little refactoring. See the tips in the Refactoring Fastify server factories section.

Migrating tests

You'll generally use the Platformatic CLI to start your Platformatic Service app (npx platformatic start). However for testing, you can use the programmatic API provided by Platformatic Service. This allows you to load your app in your test scripts and then run tests against it.

If you copy over the tests from your existing Fastify app, they will typically only require a small amount of refactoring to work with Platformatic Service.

Replacing your Fastify server factory function

The example Fastify app has a buildApp() factory function which creates a Fastify server instance. The import line for that function can be removed from tests/routes.test.js:

// tests/routes.test.js

import { buildApp } from '../app.js'

And replaced with an import of the buildServer() function from @platformatic/service:

// tests/routes.test.js

import { buildServer } from '@platformatic/service'

You can then load your Platformatic Service app like this:


const app = await buildServer('./platformatic.service.json')

Disabling server logging in your tests

If you have logging enabled for your Platformatic Service app, you'll probably want to disable the logging in your tests to remove noise from the output that you receive when you run your tests.

Instead of passing the path to your app's configuration to buildServer(), you can import the app configuration and disable logging:

// tests/routes.test.js

import serviceConfig from '../platformatic.service.json' assert { type: 'json' }

serviceConfig.server.logger = false

Then pass that serviceConfig configuration object to the buildServer() function:

// tests/routes.test.js

const app = await buildServer(serviceConfig)

Import assertions — the assert { type: 'json' } syntax — are not a stable feature of the JavaScript language, so you'll receive warning messages from Node.js when running your tests. You can disable these warnings by passing the --no-warnings flag to node.

Building apps with Platformatic Service

Because Platformatic Service is built on top of the Fastify framework, you're able to use the full functionality of the Fastify framework in your Platformatic Service app. This includes:

  • Fast, structured logging, provided by Pino
  • Request validation with JSON Schema and Ajv (other validation libraries are supported too)
  • Hooks, which allow fine grained control over when code is run during the request/response lifecycle.
  • Decorators, which allow you to customize core Fastify objects and write more modular code.

Platformatic Service also provides many other features that are built on top of Fastify.

Application features

All Platformatic Service features are fully configurable via platformatic.service.json.

Development features

  • Hot reloading — Your server will automatically reload in development as you develop features.
  • Write your plugins in JavaScript or TypeScript — TypeScript support is provided out-of-the-box and supports hot reloading.
  • Pretty printed logs — Making it easier to understand and debug your application during development.

See the Platformatic Service Configuration documentation for all of the features which can be configured.

Next steps

The documentation for Platformatic Service is a helpful reference when building a Platformatic Service app.

Watch: Understand the parts of a Platformatic app

You want to be confident that you understand how your applications work. In this video you'll learn about the parts that make up a Platformatic application, what each part does, and how they fit together.

Our series of Platformatic How-to videos can help get you up and running building apps with Platformatic open-source tools.

Got questions or need help migrating your Fastify app to use Platformatic Service? Drop by our Discord server and we'll be happy to help you.

+ + + + \ No newline at end of file diff --git a/docs/0.42.0/guides/monitoring/index.html b/docs/0.42.0/guides/monitoring/index.html new file mode 100644 index 00000000000..164ee897484 --- /dev/null +++ b/docs/0.42.0/guides/monitoring/index.html @@ -0,0 +1,24 @@ + + + + + +Monitoring with Prometheus and Grafana | Platformatic Open Source Software + + + + + +
+
Version: 0.42.0

Monitoring with Prometheus and Grafana

Prometheus is an open source systems monitoring and alerting toolkit. It's a time series database that collects metrics from configured targets at given intervals, evaluates rule expressions, displays the results, and can trigger alerts if some condition is observed to be true. +Grafana is an open source visualization and analytics software.

It's a pretty common solution to use Prometheus to collect and store monitoring data, and Grafana to visualize it.

Platformatic can be configured to expose Prometheus metrics:

...
"metrics": {
"port": 9091,
"auth": {
"username": "platformatic",
"password": "mysecret"
}
}
...

In this case, we are exposing the metrics on port 9091 (defaults to 9090), and we are using basic authentication to protect the endpoint. +We can also specify the IP address to bind to (defaults to 0.0.0.0). +Note that the metrics port is not the default in this configuration. This is because if you want to test the integration running both Prometheus and Platformatic on the same host, Prometheus starts on 9090 port too. +All the configuration settings are optional. To use the default settings, set "metrics": true. See the configuration reference for more details.

caution

Use environment variable placeholders in your Platformatic DB configuration file to avoid exposing credentials.

Prometheus Configuration

This is an example of a minimal Prometheus configuration to scrape the metrics from Platformatic:

global:
scrape_interval: 15s
scrape_timeout: 10s
evaluation_interval: 1m
scrape_configs:
- job_name: 'platformatic'
scrape_interval: 2s
metrics_path: /metrics
scheme: http
basic_auth:
username: platformatic
password: mysecret
static_configs:
- targets: ['192.168.69.195:9091']
labels:
group: 'platformatic'

We specify a target configuring the IP address and the port where Platformatic is running, and we specify the username and password to use for basic authentication. The metrics path is the one used by Platformatic. The ip address is not a loopback address so this will work even with Prometheus running in docker on the same host (see below), please change it to your host ip.

To test this configuration, we can run Prometheus locally using docker and docker-compose, so please be sure to have both correctly installed. +Save the above configuration in a file named ./prometheus/prometheus.yml and create a docker-compose.yml:

version: "3.7"

services:
prometheus:
image: prom/prometheus:latest
volumes:
- prometheus_data:/prometheus
- ./prometheus/prometheus.yml:/etc/prometheus/prometheus.yml
command:
- '--config.file=/etc/prometheus/prometheus.yml'
ports:
- '9090:9090'

volumes:
prometheus_data: {}

Then run docker-compose up -d and open http://localhost:9090 in your browser. You should see the Prometheus dashboard, and you can also query the metrics, e.g. {group="platformatic"}. See Prometheus docs for more information on querying and metrics.

Grafana Configuration

Let's see how we can configure Grafana to chart some Platformatics metrics from Prometheus. +Change the docker-compose.yml to add a grafana service:

version: "3.7"
services:

prometheus:
image: prom/prometheus:latest
volumes:
- prometheus_data:/prometheus
- ./prometheus/prometheus.yml:/etc/prometheus/prometheus.yml
command:
- '--config.file=/etc/prometheus/prometheus.yml'
ports:
- '9090:9090'

grafana:
image: grafana/grafana:latest
volumes:
- grafana_data:/var/lib/grafana
environment:
- GF_SECURITY_ADMIN_PASSWORD=pleasechangeme
depends_on:
- prometheus
ports:
- '3000:3000'

volumes:
prometheus_data: {}
grafana_data: {}

In Grafana, select Configuration -> Data Sources -> Add Data Source, and select Prometheus. +In the URL field, specify the URL of the Prometheus server, e.g. http://prometheus:9090 (the name of the service in the docker-compose file), then Save & Test.

Now we can create a dashboard and add panels to it. Select the Prometheus data source, and add queries. You should see the metrics exposed by Platformatic.

It's also possible to import pre-configured dashboards, like this one from Grafana.com.

+ + + + \ No newline at end of file diff --git a/docs/0.42.0/guides/packaging-an-application-as-a-module/index.html b/docs/0.42.0/guides/packaging-an-application-as-a-module/index.html new file mode 100644 index 00000000000..d4155529388 --- /dev/null +++ b/docs/0.42.0/guides/packaging-an-application-as-a-module/index.html @@ -0,0 +1,27 @@ + + + + + +Packaging a Platformatic Application as a module | Platformatic Open Source Software + + + + + +
+
Version: 0.42.0

Packaging a Platformatic Application as a module

Platformatic Service and Platformatic DB +offer a good starting point to create new applications. However, most developers or organizations might want to +create reusable services or applications built on top of Platformatic. +This is useful to publish the application on the public npm registry (or a private one!), including building your own CLI, +or to create a specialized template for your organization to allow for centralized bugfixes and updates.

This process is the same one we use to maintain Platformatic DB and Platformatic Composer on top of Platformatic Service.

Creating a custom Service

We are creating the module foo.js as follows:

const { schema, platformaticService } = require('@platformatic/service')

/** @type {import('fastify').FastifyPluginAsync<{}>} */
async function foo (app, opts) {
const text = app.platformatic.config.foo.text
app.get('/foo', async (request, reply) => {
return text
})

await platformaticService(app, opts)
}

foo.configType = 'foo'

// break Fastify encapsulation
foo[Symbol.for('skip-override')] = true

// The schema for our configuration file
foo.schema = {
$id: 'https://example.com/schemas/foo.json',
title: 'Foo Service',
type: 'object',
properties: {
server: schema.server,
plugins: schema.plugins,
metrics: schema.metrics,
watch: {
anyOf: [schema.watch, {
type: 'boolean'
}, {
type: 'string'
}]
},
$schema: {
type: 'string'
},
module: {
type: 'string'
},
foo: {
type: 'object',
properties: {
text: {
type: 'string'
}
},
required: ['text']
}
},
additionalProperties: false,
required: ['server']
}

// The configuration for the ConfigManager
foo.configManagerConfig = {
schema: foo.schema,
envWhitelist: ['PORT', 'HOSTNAME'],
allowToWatch: ['.env'],
schemaOptions: {
useDefaults: true,
coerceTypes: true,
allErrors: true,
strict: false
}
}

module.exports = foo

Note that the $id property of the schema identifies the module in our system, +allowing us to retrieve the schema correctly. +It is recommended, but not required, that the JSON schema is actually +published in this location. Doing so allows tooling such as the VSCode +language server to provide autocompletion.

In this example, the schema adds a custom top-level foo property +that users can use to configure this specific module.

ESM is also supported.

Consuming a custom application

Consuming foo.js is simple. We can create a platformatic.json file as follows:

{
"$schema": "https://example.com/schemas/foo.json",
"module": "./foo",
"server": {
"port": 0,
"hostname": "127.0.0.1"
},
"foo": {
"text": "Hello World"
}
}

Note that we must specify both the $schema property and module. +Module can also be any modules published on npm and installed via your package manager.

Building your own CLI

It is possible to build your own CLI with the following cli.mjs file:

import foo from './foo.js'
import { start } from '@platformatic/service'
import { printAndExitLoadConfigError } from '@platformatic/config'

await start(foo, process.argv.splice(2)).catch(printAndExitLoadConfigError)

This will also load platformatic.foo.json files.

+ + + + \ No newline at end of file diff --git a/docs/0.42.0/guides/prisma/index.html b/docs/0.42.0/guides/prisma/index.html new file mode 100644 index 00000000000..61befe438f1 --- /dev/null +++ b/docs/0.42.0/guides/prisma/index.html @@ -0,0 +1,17 @@ + + + + + +Integrate Prisma with Platformatic DB | Platformatic Open Source Software + + + + + +
+
Version: 0.42.0

Integrate Prisma with Platformatic DB

Prisma is an open-source ORM for Node.js and TypeScript. It is used as an alternative to writing SQL, or using another database access tool such as SQL query builders (like knex.js) or ORMs (like TypeORM and Sequelize). Prisma currently supports PostgreSQL, MySQL, SQL Server, SQLite, MongoDB, and CockroachDB.

Prisma can be used with JavaScript or TypeScript, and provides a level of type-safety that goes beyond the guarantees made by other ORMs in the TypeScript ecosystem. You can find an in-depth comparison of Prisma against other ORMs here.

If you want to get a quick overview of how Prisma works, you can follow the Quickstart or read the Introduction in the Prisma documentation.

How Prisma can improve your workflow with Platformatic DB

While Platformatic speeds up development of your REST and GraphQL APIs, Prisma can complement the workflow in several ways:

  1. Provides an intuitive data modeling language
  2. Provides auto-generated and customizable SQL migrations
  3. Provides type-safety and auto-completion for your database queries

You can learn more about why Prisma and Platformatic are a great match in this article.

Prerequisites

To follow along with this guide, you will need to have the following:

Setup Prisma

Install the Prisma CLI and the db-diff development dependencies in your project:

npm install --save-dev prisma @ruheni/db-diff

Next, initialize Prisma in your project

npx prisma init

This command does the following:

  • Creates a new directory called prisma which contains a file called schema.prisma. This file defines your database connection and the Prisma Client generator.
  • Creates a .env file at the root of your project if it doesn't exist. This defines your environment variables (used for your database connection).

You can specify your preferred database provider using the --datasource-provider flag, followed by the name of the provider:

npx prisma init --datasource-provider postgresql # or sqlite, mysql, sqlserver, cockroachdb

Prisma uses the DATABASE_URL environment variable to connect to your database to sync your database and Prisma schema. It also uses the variable to connect to your database to run your Prisma Client queries.

If you're using PostgreSQL, MySQL, SQL Server, or CockroachDB, ensure that the DATABASE_URL used by Prisma is the same as the one used by Platformatic DB project. If you're using SQLite, refer to the Using Prisma with SQLite section.

If you have an existing project, refer to the Adding Prisma to an existing Platformatic DB project section. If you're adding Prisma to a new project, refer to the Adding Prisma to a new project section.

Adding Prisma to an existing project

If you have an existing Platformatic DB project, you can introspect your database and generate the data model in your Prisma schema with the following command:

npx prisma db pull

The command will introspect your database and generate the data model

Next, add the @@ignore attribute to the versions model to exclude it from the Prisma Client API:

model versions {
version BigInt @id
name String?
md5 String?
run_at DateTime? @db.Timestamptz(6)

+ @@ignore
}

To learn how you can evolve your database schema, you can jump to the Evolving your database schema section.

Adding Prisma to a new project

Define a Post model with the following fields at the end of your schema.prisma file:

prisma/schema.prisma
model Post {
id Int @id @default(autoincrement())
title String
content String?
published Boolean @default(false)
viewCount Int @default(0)
createdAt DateTime @default(now())

@@map("posts")
}

The snippet above defines a Post model with the following fields and properties:

  • id: An auto-incrementing integer that will be the primary key for the model.
  • title: A non-nullable String field.
  • content: A nullable String field.
  • published: A Boolean field with a default value of false.
  • viewCount: An Int field with a default value of 0.
  • createdAt: A DateTime field with a timestamp of when the value is created as its default value.

By default, Prisma maps the model name and its format to the table name — which is also used in Prisma Client. Platformatic DB uses a snake casing and pluralized table names to map your table names to the generated API. The @@map() attribute in the Prisma schema allows you to define the name and format of your table names to be used in your database. You can also use the @map() attribute to define the format for field names to be used in your database. Refer to the Foreign keys and table names naming conventions section to learn how you can automate formatting foreign keys and table names.

Next, run the following command to generate an up and down migration:

npx db-diff

The previous command will generate both an up and down migration based on your schema. The generated migration is stored in your ./migrations directory. If you are currently using a different path to store the migration, you can provide the --migrations-dir flag followed by the path.

You can then apply the generated migration using the Platformatic DB CLI:

npx platformatic db migrations apply

Platformatic uses Postgrator to run migrations. Postgrator creates a table in the database called versions to track the applied migrations. Since the versions table is not yet captured in the Prisma schema, run the following command to introspect the database and populate it with the missing model:

npx prisma db pull

Introspecting the database to populate the model prevents including the versions table in the generated down migrations.

Your Prisma schema should now contain a versions model that is similar to this one (it will vary depending on the database system you're using):

model Post {
id Int @id @default(autoincrement())
title String
content String?
published Boolean @default(false)
viewCount Int @default(0)
createdAt DateTime @default(now())

@@map("posts")
}

+model versions {
+ version BigInt @id
+ name String?
+ md5 String?
+ run_at DateTime? @db.Timestamptz(6)
+}

Add the @@ignore attribute function to the model to exclude it from the Prisma Client API:

model versions {
version BigInt @id
name String?
md5 String?
run_at DateTime? @db.Timestamptz(6)

+ @@ignore
}

Evolving your database schema

Update the data model in your Prisma schema by adding a model or a field:

// based on the schema in the "Adding Prisma to a new project" section
+model User {
+ id Int @id @default(autoincrement())
+ email String @unique
+ name String?
+ posts Post[]
+
+ @@map("users")
+}

model Post {
id Int @id @default(autoincrement())
createdAt DateTime @default(now())
title String
content String?
published Boolean @default(false)
viewCount Int @default(0)
+ author User? @relation(fields: [authorId], references: [id])
+ authorId Int? @map("author_id")

@@map("posts")
}

Next, use the @ruheni/db-diff CLI tool to generate up and down migrations:

npx db-diff

This command will generate up and down migrations based off of your Prisma schema. If you are currently using a different path to store the migration, you can provide the --migrations-dir flag followed by the path.

Next, apply the generated migration using the Platformatic CLI:

npx platformatic db migrations apply

And you're done!

Using Prisma Client in your plugins

Plugins allow you to add custom functionality to your REST and GraphQL API. Refer to the Add Custom Functionality guide to learn more about how you can add custom functionality.

danger

Prisma Client usage with Platformatic is currently only supported in Node v18

You can use Prisma Client to interact with your database in your plugin.

To get started, run the following command:

npx prisma generate

The above command installs the @prisma/client in your project and generates a Prisma Client based off of your Prisma schema.

Install @sabinthedev/fastify-prisma fastify plugin. The plugin takes care of shutting down database connections and makes Prisma Client available as a Fastify plugin.

npm install @sabinthedev/fastify-prisma

Register the plugin and extend your REST API:

// 1.
const prismaPlugin = require("@sabinthedev/fastify-prisma")

module.exports = async (app) => {
app.log.info('plugin loaded')

// 2.
app.register(prismaPlugin)

/**
* Plugin logic
*/
// 3.
app.put('/post/:id/views', async (req, reply) => {

const { id } = req.params

// 4.
const post = await app.prisma.post.update({
where: {
id: Number(id)
},
data: {
viewCount: {
increment: 1
}
}
})

// 5.
return reply.send(post)
})
}

The snippet does the following:

  1. Imports the plugin
  2. Registers the @sabinthedev/fastify-prisma
  3. Defines the endpoint for incrementing the views of a post
  4. Makes a query to the database on the Post model to increment a post's view count
  5. Returns the updated post on success

If you would like to extend your GraphQL API, extend the schema and define the corresponding resolver:

plugin.js
// ./plugin.js
const prismaPlugin = require("@sabinthedev/fastify-prisma")

module.exports = async (app) => {
app.log.info('plugin loaded')

app.graphql.extendSchema(`
extend type Mutation {
incrementPostViewCount(id: ID): Post
}
`)

app.graphql.defineResolvers({
Mutation: {
incrementPostViewCount: async (_, { id }) => {
const post = await prisma.post.update({
where: {
id: Number(id)
},
data: {
viewCount: {
increment: 1
}
}
})

if (!post) throw new Error(`Post with id:${id} was not found`)
return post
}
}
})
}

Start the server:

npx platformatic db start

The query should now be included in your GraphQL schema.

You can also use the Prisma Client in your REST API endpoints.

Workarounds

Using Prisma with SQLite

Currently, Prisma doesn't resolve the file path of a SQLite database the same way as Platformatic does.

If your database is at the root of the project, create a new environment variable that Prisma will use called PRISMA_DATABASE_URL:

# .env
DATABASE_URL="sqlite://db.sqlite"
PRISMA_DATABASE_URL="file:../db.sqlite"

Next, update the url value in the datasource block in your Prisma schema with the updated value:

prisma/schema.prisma
// ./prisma/schema.prisma
datasource db {
provider = "sqlite"
url = env("PRISMA_DATABASE_URL")
}

Running migrations should now work smoothly and the path will be resolved correctly.

Foreign keys, field, and table names naming conventions

Foreign key names should use underscores, e.g. author_id, for Platformatic DB to correctly map relations. You can use the @map("") attribute to define the names of your foreign keys and field names to be defined in the database.

Table names should be mapped to use the naming convention expected by Platformatic DB e.g. @@map("recipes") (the Prisma convention is Recipe, which corresponds with the model name).

You can use prisma-case-format to enforce your own database conventions, i.e., pascal, camel, and snake casing.

Learn more

If you would like to learn more about Prisma, be sure to check out the Prisma docs.

+ + + + \ No newline at end of file diff --git a/docs/0.42.0/guides/securing-platformatic-db/index.html b/docs/0.42.0/guides/securing-platformatic-db/index.html new file mode 100644 index 00000000000..a01879a942f --- /dev/null +++ b/docs/0.42.0/guides/securing-platformatic-db/index.html @@ -0,0 +1,31 @@ + + + + + +Securing Platformatic DB with Authorization | Platformatic Open Source Software + + + + + +
+
Version: 0.42.0

Securing Platformatic DB with Authorization

Introduction

Authorization in Platformatic DB is role-based. User authentication and the +assignment of roles must be handled by an external authentication service. +Take a look at the reference documentation for Authorization.

The goal of this simple guide is to protect an API built with Platformatic DB +with the use of a shared secret, that we call adminSecret. We want to prevent +any user that is not an admin from accessing the data.

The use of an adminSecret is a simplistic way of securing a system. +It is a crude way for limiting access and not suitable for production systems, +as the risk of leaking the secret is high in case of a security breach. +A production friendly way would be to issue a machine-to-machine JSON Web Token, +ideally with an asymmetric key. Alternatively, you can defer to an external +service via a Web Hook.

Please refer to our guide to set up Auth0 for more information +on JSON Web Tokens.

Block access to all entities, allow admins

The following configuration will block all anonymous users (e.g. each user without a known role) +to access every entity:

{
...
"authorization": {
"adminSecret": "replaceWithSomethingRandomAndSecure"
}
}

The data will still be available if the X-PLATFORMATIC-ADMIN-SECRET HTTP header +is specified when making HTTP calls, like so:

curl -H 'X-PLATFORMATIC-ADMIN-SECRET: replaceWithSomethingRandomAndSecure' http://127.0.0.1:3042/pages
info

Configuring JWT or Web Hooks will have the same result of configuring an admin secret.

Authorization rules

Rules can be provided based on entity and role in order to restrict access and provide fine grained access. +To make an admin only query and save the page table / page entity using adminSecret this structure should be used in the platformatic.db configuration file:

  ...
"authorization": {
"adminSecret": "easy",
"rules": [{
"entity": "movie",
"role": "platformatic-admin",
"find": true,
"save": true,
"delete": false
}
]
}
info

Note that the role of an admin user from adminSecret strategy is platformatic-admin by default.

Read-only access to anonymous users

The following configuration will allow all anonymous users (e.g. each user without a known role) +to access the pages table / page entity in Read-only mode:

{
...
"authorization": {
"adminSecret": "replaceWithSomethingRandomAndSecure",
"rules": [{
"role": "anonymous",
"entity": "page",
"find": true,
"save": false,
"delete": false
}]
}
}

Note that we set find as true to allow the access, while the other options are false.

Work in Progress

This guide is a Work-In-Progress. Let us know what other common authorization use cases we should cover.

+ + + + \ No newline at end of file diff --git a/docs/0.42.0/guides/seed-a-database/index.html b/docs/0.42.0/guides/seed-a-database/index.html new file mode 100644 index 00000000000..a47c10748bf --- /dev/null +++ b/docs/0.42.0/guides/seed-a-database/index.html @@ -0,0 +1,21 @@ + + + + + +Seed a Database | Platformatic Open Source Software + + + + + +
+
Version: 0.42.0

Seed a Database

A database is as useful as the data that it contains: a fresh, empty database +isn't always the best starting point. We can add a few rows from our migrations +using SQL, but we might need to use JavaScript from time to time.

The platformatic db seed command allows us to run a +script that will populate — or "seed" — our database.

Example

Our seed script should export a Function that accepts an argument: +an instance of @platformatic/sql-mapper.

seed.js
'use strict'

module.exports = async function ({ entities, db, sql }) {
await entities.graph.save({ input: { name: 'Hello' } })
await db.query(sql`
INSERT INTO graphs (name) VALUES ('Hello 2');
`)
}

We can then run the seed script with the Platformatic CLI:

npx platformatic db seed seed.js
+ + + + \ No newline at end of file diff --git a/docs/0.42.0/guides/telemetry/index.html b/docs/0.42.0/guides/telemetry/index.html new file mode 100644 index 00000000000..45d68061fae --- /dev/null +++ b/docs/0.42.0/guides/telemetry/index.html @@ -0,0 +1,21 @@ + + + + + +Telemetry with Jaeger | Platformatic Open Source Software + + + + + +
+
Version: 0.42.0

Telemetry with Jaeger

Introduction

Platformatic supports Open Telemetry integration. This allows you to send telemetry data to one of the OTLP compatible servers (see here) or to a Zipkin server. Let's show this with Jaeger.

Jaeger setup

The quickest way is to use docker:

docker run -d --name jaeger \
-e COLLECTOR_OTLP_ENABLED=true \
-p 16686:16686 \
-p 4317:4317 \
-p 4318:4318 \
jaegertracing/all-in-one:latest

Check that the server is running by opening http://localhost:16686/ in your browser.

Platformatic setup

We will test this with a Platformatic Composer that proxies requests to a Platformatic Service, which in turn invokes a Platformatic DB Service. +In this way we show that the telemetry is propagated from the Composer throughout the services and is collected correctly. +Let's set up all these components:

Platformatic DB Service

Create a folder for DB and cd into it:

mkdir test-db
cd test-db

Then create a db in the folder using npx create-platformatic@latest:

npx create-platformatic@latest

To make it simple, use sqlite and create/apply the default migrations. This DB Service is exposed on port 5042:


➜ npx create-platformatic@latest

Hello user, welcome to Platformatic 0.32.0!
Let's start by creating a new project.
? Which kind of project do you want to create? DB
? Where would you like to create your project? .
? What database do you want to use? SQLite
? Do you want to use the connection string "sqlite://./db.sqlite"? Confirm
? Do you want to create default migrations? yes
? Do you want to create a plugin? no
? Do you want to use TypeScript? no
? What port do you want to use? 5042
[15:40:46] INFO: Configuration file platformatic.db.json successfully created.
[15:40:46] INFO: Environment file .env successfully created.
[15:40:46] INFO: Migrations folder migrations successfully created.
[15:40:46] INFO: Migration file 001.do.sql successfully created.
[15:40:46] INFO: Migration file 001.undo.sql successfully created.
[15:40:46] INFO: Plugin file created at plugin.js
? Do you want to run npm install? no
? Do you want to apply migrations? yes
...done!
? Do you want to generate types? no
? Do you want to create the github action to deploy this application to Platformatic Cloud dynamic workspace? no
? Do you want to create the github action to deploy this application to Platformatic Cloud static workspace? no

All done! Please open the project directory and check the README.
Will test this in one example with a Platformatic Composer that proxy requests to a Platformatic Service, which in turn invokes a Platformatic DB.

Open the platformatic.db.json file and add the telemetry configuration:

  "telemetry": {
"serviceName": "test-db",
"exporter": {
"type": "otlp",
"options": {
"url": "http://localhost:4318/v1/traces"
}
}
}

Finally, start the DB service:

npx platformatic db start

Platformatic Service

Create at the same level of test-db another folder for Service and cd into it:

mkdir test-service
cd test-service

Then create a service on the 5043 port in the folder using npx create-platformatic@latest:

➜ npx create-platformatic@latest

Hello user, welcome to Platformatic 0.32.0!
Let's start by creating a new project.
? Which kind of project do you want to create? Service
? Where would you like to create your project? .
? Do you want to run npm install? no
? Do you want to use TypeScript? no
? What port do you want to use? 5043
[15:55:35] INFO: Configuration file platformatic.service.json successfully created.
[15:55:35] INFO: Environment file .env successfully created.
[15:55:35] INFO: Plugins folder "plugins" successfully created.
[15:55:35] INFO: Routes folder "routes" successfully created.
? Do you want to create the github action to deploy this application to Platformatic Cloud dynamic workspace? no
? Do you want to create the github action to deploy this application to Platformatic Cloud static workspace? no

Open the platformatic.service.json file and add the following telemetry configuration (it's exactly the same as DB, but with a different serviceName)

  "telemetry": {
"serviceName": "test-service",
"exporter": {
"type": "otlp",
"options": {
"url": "http://localhost:4318/v1/traces"
}
}
}

We want this service to invoke the DB service, so we need to add a client for test-db to it:

npx platformatic client http://127.0.0.1:5042 js --name movies

Check platformatic.service.json to see that the client has been added (PLT_MOVIES_URL is defined in .env):

    "clients": [
{
"schema": "movies/movies.openapi.json",
"name": "movies",
"type": "openapi",
"url": "{PLT_MOVIES_URL}"
}
]

Now open routes/root.js and add the following:

  fastify.get('/movies-length', async (request, reply) => {
const movies = await request.movies.getMovies()
return { length: movies.length }
})

This code calls movies to get all the movies and returns the length of the array.

Finally, start the service:

npx platformatic service start

Platformatic Composer

At the same level as test-db and test-service, create another folder for Composer and cd into it:

mkdir test-composer
cd test-composer

Then create a composer on port 5044 in the folder using npx create-platformatic@latest:

➜ npx create-platformatic@latest

Hello marcopiraccini, welcome to Platformatic 0.32.0!
Let's start by creating a new project.
? Which kind of project do you want to create? Composer
? Where would you like to create your project? .
? What port do you want to use? 5044
? Do you want to run npm install? no
[16:05:28] INFO: Configuration file platformatic.composer.json successfully created.
[16:05:28] INFO: Environment file .env successfully created.
? Do you want to create the github action to deploy this application to Platformatic Cloud dynamic workspace? no
? Do you want to create the github action to deploy this application to Platformatic Cloud static workspace? no

All done! Please open the project directory and check the README.

Open platformatic.composer.json and change it to the following:

{
"$schema": "https://platformatic.dev/schemas/v0.32.0/composer",
"server": {
"hostname": "{PLT_SERVER_HOSTNAME}",
"port": "{PORT}",
"logger": {
"level": "{PLT_SERVER_LOGGER_LEVEL}"
}
},
"composer": {
"services": [
{
"id": "example",
"origin": "http://127.0.0.1:5043",
"openapi": {
"url": "/documentation/json"
}
}
],
"refreshTimeout": 3000
},
"telemetry": {
"serviceName": "test-composer",
"exporter": {
"type": "otlp",
"options": {
"url": "http://localhost:4318/v1/traces"
}
}
},
"watch": true
}

Note that we just added test-service as the origin of the proxied service and added the usual telemetry configuration, with a different serviceName.

Finally, start the composer:

npx platformatic composer start

Run the Test

Check that the composer is exposing movies-length opening: http://127.0.0.1:5044/documentation/

You should see: +image

To add some data, we can POST directly to the DB service (port 5042):

curl -X POST -H "Content-Type: application/json" -d '{"title":"The Matrix"}' http://127.0.0.1:5042/movies 
curl -X POST -H "Content-Type: application/json" -d '{"title":"The Matrix Reloaded"}' http://127.0.0.1:5042/movies

Now, let's check that the composer (port 5044) is working:

curl http://127.0.0.1:5044/movies-length

If the composer is working correctly, you should see:

{"length":2}

However, the main interest of this example is to show how to use the Platformatic Telemetry, so let's check it. +Open the Jaeger UI at http://localhost:16686/ and you should see something like this:

image

Select on the left the test-composer service and the GET /movies-length operation, click on "Find traces" and you should see something like this:

image

You can then click on the trace and see the details:

image

Note that every time a request is received or a client call is made, a new span is started. So we have:

  • One span for the request received by the test-composer
  • One span for the client call to test-service
  • One span for the request received by test-service
  • One span for the client call to test-db
  • One span for the request received by test-db

All these spans are linked together, so you can see the whole trace.

What if you want to use Zipkin?

Starting from this example, it's also possible to run the same test using Zipkin. To do so, you need to start the Zipkin server:

docker run -d -p 9411:9411 openzipkin/zipkin

Then, you need to change the telemetry configuration in all the platformatic.*.json files to the following (only the exporter object is different):

  "telemetry": {
(...)
"exporter": {
"type": "zipkin",
"options": {
"url": "http://127.0.0.1:9411/api/v2/spans"
}
}
}

The Zipkin UI is available at http://localhost:9411/

+ + + + \ No newline at end of file diff --git a/docs/0.42.0/platformatic-cloud/deploy-database-neon/index.html b/docs/0.42.0/platformatic-cloud/deploy-database-neon/index.html new file mode 100644 index 00000000000..85e665b3615 --- /dev/null +++ b/docs/0.42.0/platformatic-cloud/deploy-database-neon/index.html @@ -0,0 +1,32 @@ + + + + + +Deploy a PostgreSQL database with Neon | Platformatic Open Source Software + + + + + +
+
Version: 0.42.0

Deploy a PostgreSQL database with Neon

Neon offers multi-cloud fully managed +Postgres with a generous free tier. They separated storage and +compute to offer autoscaling, branching, and bottomless storage. +It offers a great environment for creating database preview +environments for your Platformatic DB +applications.

This guide shows you how to integrate Neon branch deployments with your +Platformatic app's GitHub Actions workflows. It assumes you have already +followed the Quick Start Guide.

Create a project on Neon

To set up an account with Neon, open their website, sign up and create a +new project.

Take note of the following configuration setting values:

  • The connection string for your main branch database, to be stored in a NEON_DB_URL_PRODUCTION secret
  • The Project ID (available under the project Settings), to be stored in a NEON_PROJECT_ID secret
  • Your API key (available by clicking on your user icon > Account > Developer settings), to be stored under NEON_API_KEY

You can learn more about Neon API keys in their Manage API Keys documentation.

Configure Github Environments and Secrets

Now you need to set the configuration values listed above as +repository secrets +on your project's GitHub repository. +Learn how to use environments for deployment in GitHub's documentation.

Configure the GitHub Environments for your repository to have:

  • production secrets, available only to the main branch:
    • NEON_DB_URL_PRODUCTION
  • previews secrets available to all branches:
    • NEON_PROJECT_ID
    • NEON_API_KEY

Configure the main branch workflow

Replace the contents of your app's workflow for static workspace deployment:

.github/workflows/platformatic-static-workspace-deploy.yml
name: Deploy Platformatic application to the cloud
on:
push:
branches:
- main
paths-ignore:
- 'docs/**'
- '**.md'

jobs:
build_and_deploy:
environment:
name: production
permissions:
contents: read
runs-on: ubuntu-latest
steps:
- name: Checkout application project repository
uses: actions/checkout@v4
- name: npm install --omit=dev
run: npm install --omit=dev
- name: Deploy project
uses: platformatic/onestep@latest
with:
github_token: ${{ secrets.GITHUB_TOKEN }}
platformatic_workspace_id: <YOUR_STATIC_WORKSPACE_ID>
platformatic_workspace_key: ${{ secrets.PLATFORMATIC_STATIC_WORKSPACE_API_KEY }}
platformatic_config_path: ./platformatic.db.json
secrets: DATABASE_URL
env:
DATABASE_URL: ${{ secrets.NEON_DB_URL_PRODUCTION }}
PLT_SERVER_LOGGER_LEVEL: info
PORT: 3042
PLT_SERVER_HOSTNAME: 127.0.0.1

Replace <YOUR_STATIC_WORKSPACE_ID> with the workspace ID that you previously had in this file.

When your app is deployed to the static workspace it will now be configured to connect to the +main branch database for your Neon project.

Configure the preview environment workflow

Neon allows up to 10 database branches on their free tier. You can automatically create a new +database branch when a pull request is opened, and then automatically remove it when the pull +request is merged.

GitHub Action to create a preview environment

Replace the contents of your app's workflow for dynamic workspace deployment:

.github/workflows/platformatic-dynamic-workspace-deploy.yml
name: Deploy to Platformatic cloud
on:
pull_request:
paths-ignore:
- 'docs/**'
- '**.md'

# This allows a subsequently queued workflow run to interrupt previous runs
concurrency:
group: "${{ github.workflow }} @ ${{ github.event.pull_request.head.label || github.head_ref || github.ref }}"
cancel-in-progress: true

jobs:
build_and_deploy:
runs-on: ubuntu-latest
environment:
name: development
steps:
- name: Checkout application project repository
uses: actions/checkout@v4
- name: npm install --omit=dev
run: npm install --omit=dev
- name: Get PR number
id: get_pull_number
run: |
pull_number=$(jq --raw-output .pull_request.number "$GITHUB_EVENT_PATH")
echo "pull_number=${pull_number}" >> $GITHUB_OUTPUT
echo $pull_number
- uses: neondatabase/create-branch-action@v4
with:
project_id: ${{ secrets.NEON_PROJECT_ID }}
branch_name: pr-${{ steps.get_pull_number.outputs.pull_number }}
api_key: ${{ secrets.NEON_API_KEY }}
id: create-branch
- name: Deploy project
uses: platformatic/onestep@latest
with:
github_token: ${{ secrets.GITHUB_TOKEN }}
platformatic_workspace_id: ${{ secrets.PLATFORMATIC_DYNAMIC_WORKSPACE_ID }}
platformatic_workspace_key: ${{ secrets.PLATFORMATIC_DYNAMIC_WORKSPACE_KEY }}
platformatic_config_path: ./platformatic.db.json
env:
DATABASE_URL: ${{ steps.create-branch.outputs.db_url }}
PLT_SERVER_LOGGER_LEVEL: info
PORT: 3042
PLT_SERVER_HOSTNAME: 127.0.0.1

Replace <YOUR_DYNAMIC_WORKSPACE_ID> with the workspace ID that you previously had in this file.

Configure preview environment cleanup

After a pull request to the main branch is merged, you should remove the matching database branch.

Create a new file, .github/workflows/cleanup-neon-branch-db.yml, and copy and paste in the following +workflow configuration:

.github/workflows/cleanup-neon-branch-db.yml
name: Cleanup Neon Database Branch
on:
push:
branches:
- 'main'
jobs:
delete-branch:
environment:
name: development
permissions: write-all
runs-on: ubuntu-latest
steps:
- name: Get PR info
id: get-pr-info
uses: actions-ecosystem/action-get-merged-pull-request@v1.0.1
with:
github_token: ${{secrets.GITHUB_TOKEN}}
- run: |
echo ${{ steps.get-pr-info.outputs.number}}
- name: Delete Neon Branch
if: ${{ steps.get-pr-info.outputs.number }}
uses: neondatabase/delete-branch-action@v3
with:
project_id: ${{ secrets.NEON_PROJECT_ID }}
branch: pr-${{ steps.get-pr-info.outputs.number }}
api_key: ${{ secrets.NEON_API_KEY }}

Deployment

To deploy these changes to your app:

  1. Create a Git branch locally (git checkout -b <BRANCH_NAME>)
  2. Commit your changes and push them to GitHub
  3. Open a pull request on GitHub - a branch will automatically be created for your Neon database and a preview app will be deployed to Platformatic Cloud (in your app's dynamic workspace).
  4. Merge the pull request - the Neon database branch will be automatically deleted and your app will be deployed to Platformatic Cloud (in your app's static workspace).
+ + + + \ No newline at end of file diff --git a/docs/0.42.0/platformatic-cloud/pricing/index.html b/docs/0.42.0/platformatic-cloud/pricing/index.html new file mode 100644 index 00000000000..059ca8d1815 --- /dev/null +++ b/docs/0.42.0/platformatic-cloud/pricing/index.html @@ -0,0 +1,23 @@ + + + + + +Platformatic Cloud Pricing | Platformatic Open Source Software + + + + + +
+
Version: 0.42.0

Platformatic Cloud Pricing

Find the plan that works best for you!

FreeBasicAdvancedPro
Pricing$0$4.99$22.45$49.99
Slots01512
CNAME-truetruetrue
Always On-truetruetrue

FAQ

What is a slot?

One slot is equal to one compute unit. The free plan has no always-on +machines and they will be stopped while not in use.

What is a workspace?

A workspace is the security boundary of your deployment. You will use +the same credentials to deploy to one.

A workspace can be either static or dynamic. A static workspace always deploys to the same domain, while in a dynamic workspace each deployment will have its own domain. The latter is useful for providing pull request previews.

Can I change or upgrade my plan after I start using Platformatic?

Plans can be changed or upgraded at any time.

What does it mean I can set my own CNAME?

Free applications only get a *.deploy.space domain name to access their application. All other plans can set it to a domain of their choosing.

+ + + + \ No newline at end of file diff --git a/docs/0.42.0/platformatic-cloud/quick-start-guide/index.html b/docs/0.42.0/platformatic-cloud/quick-start-guide/index.html new file mode 100644 index 00000000000..98d25d25917 --- /dev/null +++ b/docs/0.42.0/platformatic-cloud/quick-start-guide/index.html @@ -0,0 +1,45 @@ + + + + + +Cloud Quick Start Guide | Platformatic Open Source Software + + + + + +
+
Version: 0.42.0

Cloud Quick Start Guide

This guide shows you how to create and deploy an application to +Platformatic Cloud.

Prerequisites

To follow along with this guide you'll need to have these things installed:

You will also need to have a GitHub account.

Log in to Platformatic Cloud

Go to the Platformatic Cloud website and click on the +Continue with GitHub button. You'll be transferred to a GitHub page that +asks you to Authorize Platformatic Cloud. To continue, click on the +Authorize platformatic button.

Screenshot of Continue with GitHub button

On the Platformatic Cloud Service Agreements page, check the boxes and +click the Continue button. You'll then be redirected to your Cloud Dashboard page.

Create a Cloud app

Screenshot of an empty Apps page

Click the Create an app now button on your Cloud Dashboard page.

Enter quick-start-app as your application name. Click the Create Application button.

Create a static app workspace

Enter production as the name for your workspace. Then click on the Create Workspace button.

On the next page you'll see the Workspace ID and API key for your app workspace. +Copy them and store them somewhere secure for future reference, for example in a password manager app. +The API key will be used to deploy your app to the workspace that you've just created.

Click on the Back to dashboard button.

Create a dynamic app workspace

On your Cloud Dashboard, click on your app, then click on Create Workspace in the Workspaces +sidebar.

Screenshot of the create app workspace screen

The Dynamic Workspace option will be automatically enabled as you have already created a +static workspace. Dynamic workspaces can be used to deploy preview applications for GitHub +pull requests.

Enter development as the name for your workspace, then click on the Create Workspace button. +Copy the Workspace ID and API key and store them somewhere secure.

Create a GitHub repository

Go to the Create a new repository page on GitHub. +Enter quick-start-app as the Repository name for your new repository. +Click on the Add a README file checkbox and click the Create repository +button.

Add the workspace API keys as repository secrets

Go to the Settings tab on your app's GitHub repository. Click into the +Secrets and variables > Actions section and add the following secrets:

NameSecret
PLATFORMATIC_STATIC_WORKSPACE_IDYour app's static workspace ID
PLATFORMATIC_STATIC_WORKSPACE_API_KEYYour app's static workspace API key
PLATFORMATIC_DYNAMIC_WORKSPACE_IDYour app's dynamic workspace ID
PLATFORMATIC_DYNAMIC_WORKSPACE_API_KEYYour app's dynamic workspace API key

Click on the New repository secret button to add a secret.

tip

You can also use the GitHub CLI to set secrets on your GitHub repository, for example:

gh secret set \
--app actions \
--env-file <FILENAME_OF_ENV_FILE_WITH_SECRETS> \
--repos <YOUR_GITHUB_USERNAME>/<REPO_NAME>

Create a new Platformatic app

In your terminal, use Git to clone your repository from GitHub. For example:

git clone git@github.com:username/quick-start-app.git
tip

See the GitHub documentation for help with +Cloning a repository.

Now change into the project directory:

cd quick-start-app

Now run this command to start the Platformatic creator wizard:

npm create platformatic@latest

This interactive command-line tool will ask you some questions about how you'd +like to set up your new Platformatic app. For this guide, select these options:

- Which kind of project do you want to create?     => DB
- Where would you like to create your project? => .
- Do you want to create default migrations? => yes
- Do you want to create a plugin? => yes
- Do you want to use TypeScript? => no
- Do you want to overwrite the existing README.md? => yes
- Do you want to run npm install? => yes (this can take a while)
- Do you want to apply the migrations? => yes
- Do you want to generate types? => yes
- Do you want to create the github action to deploy this application to Platformatic Cloud dynamic workspace? => yes
- Do you want to create the github action to deploy this application to Platformatic Cloud static workspace? => yes

Copy and paste your dynamic and static workspace IDs when prompted by the creator wizard.

Once the wizard is complete, you'll have a Platformatic app project in the +quick-start-app directory, with example migration files and a plugin script.

Deploy the app

In your project directory, commit your application with Git:

git add .

git commit -m "Add Platformatic app"

Now push your changes up to GitHub:

git push origin main

On the GitHub repository page in your browser click on the Actions tab. +You should now see the Platformatic Cloud deployment workflow running.

Test the deployed app

Screenshot of a static app workspace that has had an app deployed to it

Once the GitHub Actions deployment workflow has completed, go to the production workspace +for your app in Platformatic Cloud. Click on the link for the Entry Point. You should now +see the Platformatic DB app home page.

Click on the OpenAPI Documentation link to try out your app's REST API using the Swagger UI.

Screenshot of Swagger UI for a Platformatic DB app

Preview pull request changes

When a pull request is opened on your project's GitHub repository, a preview app will automatically +be deployed to your app's dynamic workspace.

To see a preview app in action, create a new Git branch:

git checkout -b add-hello-endpoint

Then open up your app's plugin.js file in your code editor. Add the following code inside +the existing empty function:

app.get('/hello', async function(request, reply) {
return { hello: 'from Platformatic Cloud' }
})

Save the changes, then commit and push them up to GitHub:

git add plugin.js

git commit -m "Add hello endpoint"

git push -u origin add-hello-endpoint

Now create a pull request for your changes on GitHub. At the bottom of the +pull request page you'll see that a deployment has been triggered to your +app's dynamic workspace.

Screenshot of checks on a GitHub pull request

Once the deployment has completed, a comment will appear on your pull request +with a link to the preview app.

Screenshot of a deployed preview app comment on a GitHub pull request

Click on the Application URL link. If you add /hello on to the URL, +you should receive a response from the endpoint that you just added to +your application.

Screenshot of a JSON response from an API endpoint

+ + + + \ No newline at end of file diff --git a/docs/0.42.0/reference/cli/index.html b/docs/0.42.0/reference/cli/index.html new file mode 100644 index 00000000000..4caad35f925 --- /dev/null +++ b/docs/0.42.0/reference/cli/index.html @@ -0,0 +1,44 @@ + + + + + +Platformatic CLI | Platformatic Open Source Software + + + + + +
+
Version: 0.42.0

Platformatic CLI

Installation and usage

Install the Platformatic CLI as a dependency for your project:

npm install platformatic

Once it's installed you can run it with:

npx platformatic
info

The platformatic package can be installed globally, but installing it as a +project dependency ensures that everyone working on the project is using the +same version of the Platformatic CLI.

Commands

The Platformatic CLI provides the following commands:

help

Welcome to Platformatic. Available commands are:

  • help - display this message.
  • help <command> - show more information about a command.
  • db - start Platformatic DB; type platformatic db help to know more.
  • service - start Platformatic Service; type platformatic service help to know more.
  • upgrade - upgrade the Platformatic configuration to the latest version.
  • gh - create a new gh action for Platformatic deployments.
  • deploy - deploy a Platformatic application to the cloud.
  • runtime - start Platformatic Runtime; type platformatic runtime help to know more.
  • start - start a Platformatic application.
  • frontend - create frontend code to consume the REST APIs.

compile

Compile all typescript plugins.

  $ platformatic compile

This command will compile the TypeScript plugins for each platformatic application.

deploy

Deploys an application to the Platformatic Cloud.

 $ platformatic deploy

Options:

  • -t, --type static/dynamic - The type of the workspace.
  • -c, --config FILE - Specify a configuration file to use.
  • -k, --keys FILE - Specify a path to the workspace keys file.
  • -l --label TEXT - The deploy label. Only for dynamic workspaces.
  • -e --env FILE - The environment file to use. Default: ".env"
  • -s --secrets FILE - The secrets file to use. Default: ".secrets.env"
  • --workspace-id uuid - The workspace id where the application will be deployed.
  • --workspace-key TEXT - The workspace key where the application will be deployed.
  1. To deploy a Platformatic application to the cloud, you should go to the Platformatic cloud dashboard and create a workspace.
  2. Once you have created a workspace, retrieve your workspace id and key from the workspace settings page. Optionally, you can download the provided workspace env file, which you can use with the --keys option.

ℹ️

When deploying an application to a dynamic workspace, specify the deploy --label option. You can find it on your cloud dashboard or you can specify a new one.

gh

Creates a gh action to deploy platformatic services on workspaces.

 $ platformatic gh -t dynamic

Options:

  • -w --workspace ID - The workspace ID where the service will be deployed.
  • -t, --type static/dynamic - The type of the workspace. Defaults to static.
  • -c, --config FILE - Specify a configuration file to use.
  • -b, --build - Build the service before deploying (npm run build).

If not specified, the configuration will be loaded from any of the following, in the current directory.

  • platformatic.db.json, or
  • platformatic.db.yml, or
  • platformatic.db.tml, or
  • platformatic.service.json, or
  • platformatic.service.yml, or
  • platformatic.service.tml

You can find more details about the configuration format here:

start

Start a Platformatic application with the following command:

$ platformatic start

Options:

  • -c, --config <path> - Path to the configuration file.
  • --inspect[=[host:]port] - Start the Node.js debugger. host defaults to '127.0.0.1'. port defaults to 9229. Use caution when binding to a public host:port combination.
  • --inspect-brk[=[host:]port] - Start the Node.js debugger and block until a client has attached. host defaults to '127.0.0.1'. port defaults to 9229. Use caution when binding to a public host:port combination.

upgrade

Upgrade the Platformatic schema configuration to the latest version.

 $ platformatic upgrade

Options:

  • -c, --config FILE - Specify a schema configuration file to use.

If not specified, the configuration will be loaded from any of the following, in the current directory.

  • platformatic.db.json, or
  • platformatic.db.yml, or
  • platformatic.db.tml, or
  • platformatic.service.json, or
  • platformatic.service.yml, or
  • platformatic.service.tml

You can find more details about the configuration format here:

client

platformatic client <command>

help

Create a Fastify plugin that exposes a client for a remote OpenAPI or GraphQL API.

To create a client for a remote OpenAPI API, you can use the following command:

$ platformatic client http://example.com/to/schema/file -n myclient

To create a client for a remote Graphql API, you can use the following command:

$ platformatic client http://example.com/graphql -n myclient

Instead of a URL, you can also use a local file:

$ platformatic client path/to/schema -n myclient

This will create a Fastify plugin that exposes a client for the remote API in a folder myclient +and a file named myclient.js inside it.

If platformatic config file is specified, it will be edited and a clients section will be added. +Then, in any part of your Platformatic application you can use the client.

You can use the client in your application in Javascript, calling a GraphQL endpoint:

module.exports = async function (app, opts) {
app.post('/', async (request, reply) => {
const res = await app.myclient.graphql({
query: 'query { hello }'
})
return res
})
}

or in Typescript, calling an OpenAPI endpoint:

import { FastifyInstance } from 'fastify'
/// <reference path="./myclient" />

export default async function (app: FastifyInstance) {
app.get('/', async () => {
return app.myclient.get({})
})
}

Options:

  • -c, --config <path> - Path to the configuration file.
  • -n, --name <name> - Name of the client.
  • -f, --folder <name> - Name of the plugin folder, defaults to --name value.
  • -t, --typescript - Generate the client plugin in TypeScript.
  • --full-response - Client will return full response object rather than just the body.
  • --full-request - Client will be called with all parameters wrapped in body, headers and query properties.
  • --full - Enables both --full-request and --full-response overriding them.
  • --optional-headers <headers> - Comma separated string of headers that will be marked as optional in the type file
  • --validate-response - If set, will validate the response body against the schema.

composer

platformatic composer <command>

help

Available commands:

  • help - show this help message.
  • help <command> - shows more information about a command.
  • start - start the server.
  • openapi schemas fetch - fetch OpenAPI schemas from services.

openapi schemas fetch

Fetch OpenAPI schemas from remote services to use in your Platformatic project.

  $ platformatic composer openapi schemas fetch

It will fetch all the schemas from the remote services and store them by path +set in the platformatic.composer.json file. If the path is not set, it will +skip fetching the schema.

start

Start the Platformatic Composer server with the following command:

 $ platformatic composer start

You will need a configuration file. Here is an example to get you started, +save the following as platformatic.composer.json:

  {
"server": {
"hostname": "127.0.0.1",
"port": 0,
"logger": {
"level": "info"
}
},
"composer": {
"services": [
{
"id": "service1",
"origin": "http://127.0.0.1:3051",
"openapi": {
"url": "/documentation/json"
}
},
{
"id": "service2",
"origin": "http://127.0.0.1:3052",
"openapi": {
"file": "./schemas/service2.openapi.json"
}
}
],
"refreshTimeout": 1000
}
}

By sending the SIGUSR2 signal, the server can be reloaded.

Options:

  • -c, --config FILE - Specify a configuration file to use.

If not specified, the configuration will be loaded from any of the following, in the current directory.

  • platformatic.composer.json, or
  • platformatic.composer.yml, or
  • platformatic.composer.tml

You can find more details about the configuration format here:

db

platformatic db <command>

compile

Compile typescript plugins.

  $ platformatic db compile

As a result of executing this command, the Platformatic DB will compile typescript +plugins in the outDir directory.

If not specified, the configuration will be loaded from any of the following, in the current directory.

  • platformatic.db.json, or
  • platformatic.db.yml, or
  • platformatic.db.tml

You can find more details about the configuration format here:

help

Available commands:

  • help - show this help message.
  • help <command> - shows more information about a command.
  • start - start the server.
  • compile - compile typescript plugins.
  • seed - run a seed file.
  • types - generate typescript types for entities.
  • schema - generate and print api schema.
  • migrations create - generate do and undo migration files.
  • migrations apply - apply migration files.

migrations apply

Apply all configured migrations to the database:

  $ platformatic db migrations apply

The migrations will be applied in the order they are specified in the +folder defined in the configuration file. If you want to apply a specific migration, +you can use the --to option:

  $ platformatic db migrations apply --to 001

Here is an example migration:

  CREATE TABLE graphs (
id SERIAL PRIMARY KEY,
name TEXT
);

You can always rollback to a specific migration with:

  $ platformatic db migrations apply --to VERSION

Use 000 to reset to the initial state.

Options:

  • -c, --config <path> - Path to the configuration file.
  • -t, --to <version> - Migrate to a specific version.

If not specified, the configuration will be loaded from any of the following, in the current directory.

  • platformatic.db.json, or
  • platformatic.db.yml, or
  • platformatic.db.tml

You can find more details about the configuration format here:

migrations create

Create next migration files.

  $ platformatic db migrations create

It will generate do and undo sql files in the migrations folder. The name of the +files will be the next migration number.

  $ platformatic db migrations create --name "create_users_table"

Options:

  • -c, --config <path> - Path to the configuration file.

If not specified, the configuration will be loaded from any of the following, in the current directory.

  • platformatic.db.json, or
  • platformatic.db.yml, or
  • platformatic.db.tml

You can find more details about the configuration format here:

migrations

Available commands:

  • migrations create - generate do and undo migration files.
  • migrations apply - apply migration files.

schema

Update the config schema file:

  • schema config - update the JSON schema config available on platformatic.db.schema.json

Your configuration on platformatic.db.json has a schema defined to improve the developer experience and avoid mistakes when updating the configuration of Platformatic DB. When you run platformatic db init, a new JSON $schema property is added in platformatic.db.schema.json. This allows your IDE to add suggestions (e.g. mandatory/missing fields, types, default values) when you open the config in platformatic.db.json. By running platformatic db schema config, you can update your schema so that it matches the latest changes available in your config.

Generate a schema from the database and prints it to standard output:

  • schema graphql - generate the GraphQL schema
  • schema openapi - generate the OpenAPI schema

Options:

  • -c, --config FILE - Specify a configuration file to use.

If not specified, the configuration will be loaded from any of the following, in the current directory.

  • platformatic.db.json, or
  • platformatic.db.yml, or
  • platformatic.db.tml

You can find more details about the configuration format here:

seed

Load a seed into the database. This is a convenience method that loads a JavaScript file and configures @platformatic/sql-mapper to connect to the database specified in the configuration file.

Here is an example of a seed file:

  'use strict'

module.exports = async function ({ entities, db, sql }) {
await entities.graph.save({ input: { name: 'Hello' } })
await db.query(sql`
INSERT INTO graphs (name) VALUES ('Hello 2');
`)
}

You can run this using the seed command:

  $ platformatic db seed seed.js

Options:

  • --config - Path to the configuration file.

If not specified, the configuration will be loaded from any of the following, in the current directory.

  • platformatic.db.json, or
  • platformatic.db.yml, or
  • platformatic.db.tml

You can find more details about the configuration format here:

start

Start the Platformatic DB server with the following command:

 $ platformatic db start

You will need a configuration file. Here is an example to get you started, +save the following as platformatic.db.json:

  {
"server": {
"hostname": "127.0.0.1",
"port": 0,
"logger": {
"level": "info"
}
},
"db": {
"connectionString": "sqlite://./db"
},
"migrations": {
"dir": "./migrations"
}
}

Remember to create a migration, run the db help migrate command to know more.

All outstanding migrations will be applied to the database unless the +migrations.autoApply configuration option is set to false.

By sending the SIGUSR2 signal, the server can be reloaded.

Options:

  • -c, --config FILE - Specify a configuration file to use.

If not specified, the configuration will be loaded from any of the following, in the current directory.

  • platformatic.db.json, or
  • platformatic.db.yml, or
  • platformatic.db.tml

You can find more details about the configuration format here:

types

Generate typescript types for your entities from the database.

  $ platformatic db types

As a result of executing this command, the Platformatic DB will generate a types +folder with a typescript file for each database entity. It will also generate a +global.d.ts file that injects the types into the Application instance.

In order to add type support to your plugins, you need to install some additional +dependencies. To do this, copy and run an npm install command with dependencies +that "platformatic db types" will ask you.

Here is an example of a platformatic plugin.js with jsdoc support. +You can use it to add autocomplete to your code.

/// <reference path="./global.d.ts" />
'use strict'

/** @param {import('fastify').FastifyInstance} app */
module.exports = async function (app) {
app.get('/movie', async () => {
const movies = await app.platformatic.entities.movie.find({
where: { title: { eq: 'The Hitchhiker\'s Guide to the Galaxy' } }
})
return movies[0].id
})
}

If not specified, the configuration will be loaded from any of the following, in the current directory.

  • platformatic.db.json, or
  • platformatic.db.yml, or
  • platformatic.db.tml

You can find more details about the configuration format here:

service

platformatic service <command>

compile

Compile typescript plugins.

  $ platformatic service compile

As a result of executing this command, Platformatic Service will compile TypeScript plugins in the outDir directory.

If not specified, the configuration will be loaded from any of the following, in the current directory.

  • platformatic.service.json, or
  • platformatic.service.yml, or
  • platformatic.service.tml

You can find more details about the configuration format here:

help

Available commands:

  • help - show this help message.
  • help <command> - shows more information about a command.
  • start - start the server.
  • schema config - generate the schema configuration file.

schema

Update the config schema file:

  • schema config - update the JSON schema config available on platformatic.service.schema.json

Your configuration in platformatic.service.json has a schema defined to improve the developer experience and avoid mistakes when updating the configuration of Platformatic Service. When you initialize a new Platformatic Service (e.g. by running npm create platformatic@latest), a new JSON $schema property is added to the platformatic.service.json config. This allows your IDE to add suggestions (e.g. mandatory/missing fields, types, default values) when you open the platformatic.service.json config. By running platformatic service schema config you can update your schema so that it matches the latest changes available in your config.

start

Start the Platformatic Service with the following command:

 $ platformatic service start

You will need a configuration file. Here is an example to get you started; save the following as platformatic.service.json:

{
"server": {
"hostname": "127.0.0.1",
"port": 0,
"logger": {
"level": "info"
}
},
"plugin": {
"path": "./plugin.js"
}
}

frontend

platformatic frontend <url> <language>

Create frontend code to consume the REST APIs of a Platformatic application.

From the directory you want the frontend code to be generated (typically <YOUR_FRONTEND_APP_DIRECTORY>/src/) run -

npx platformatic frontend http://127.0.0.1:3042 ts

ℹ️

Where http://127.0.0.1:3042 must be replaced with your Platformatic application endpoint, and the language can either be ts or js. When the command is run, the Platformatic CLI generates -

  • api.d.ts - A TypeScript module that includes all the OpenAPI-related types.
  • api.ts or api.js - A module that includes a function for every single REST endpoint.

If you use the --name option it will create custom file names.

npx platformatic frontend http://127.0.0.1:3042 ts --name foobar

Will create foobar.ts and foobar-types.d.ts

Refer to the dedicated guide where the full process of generating and consuming the frontend code is described.

In case of problems, please check that:

  • The Platformatic app URL is valid.
  • The Platformatic app that the URL belongs to must be up and running.
  • OpenAPI must be enabled (db.openapi in your platformatic.db.json is not set to false). You can find more details about the db configuration format here.
  • CORS must be managed in your Platformatic app (server.cors.origin.regexp in your platformatic.db.json is set to /*/, for instance). You can find more details about the cors configuration here.

runtime

platformatic runtime <command>

compile

Compile all typescript plugins for all services.

  $ platformatic runtime compile

This command will compile the TypeScript plugins for each service registered in the runtime.

help

Available commands:

  • help - show this help message.
  • help <command> - shows more information about a command.
  • start - start the application.

start

Start the Platformatic Runtime with the following command:

 $ platformatic runtime start

start

Start a Platformatic application with the following command:

$ platformatic start

Options:

  • -c, --config <path> - Path to the configuration file.
  • --inspect[=[host:]port] - Start the Node.js debugger. host defaults to '127.0.0.1'. port defaults to 9229. Use caution when binding to a public host:port combination.
  • --inspect-brk[=[host:]port] - Start the Node.js debugger and block until a client has attached. host defaults to '127.0.0.1'. port defaults to 9229. Use caution when binding to a public host:port combination.
+ + + + \ No newline at end of file diff --git a/docs/0.42.0/reference/client/frontend/index.html b/docs/0.42.0/reference/client/frontend/index.html new file mode 100644 index 00000000000..fce95885d38 --- /dev/null +++ b/docs/0.42.0/reference/client/frontend/index.html @@ -0,0 +1,17 @@ + + + + + +Frontend client | Platformatic Open Source Software + + + + + +
+
Version: 0.42.0

Frontend client

Create implementation and type files that exposes a client for a remote OpenAPI server, that uses fetch and can run in any browser.

To create a client for a remote OpenAPI API, you can use the following command:

$ platformatic frontend http://example.com/to/schema/file <language> --name <clientname>

where <language> can be either js or ts.

This will create two files clientname.js (or clientname.ts) and clientname-types.d.ts for types.

clientname by default is api

Usage

The implementation generated by the tool exports all the named operations found and a factory object.

Named operations

import { setBaseUrl, getMovies } from './api.js'

setBaseUrl('http://my-server-url.com') // modifies the global `baseUrl` variable

const movies = await getMovies({})
console.log(movies)

Factory

The factory object is called build and can be used like this

import build from './api.js'

const client = build('http://my-server-url.com')

const movies = await client.getMovies({})
console.log(movies)

You can use both named operations and the factory in the same file. They can work on different hosts, so the factory does not use the global setBaseUrl function.

Generated Code

The type file will look like this

export interface GetMoviesRequest {
'limit'?: number;
'offset'?: number;
// ... all other options
}

interface GetMoviesResponseOK {
'id': number;
'title': string;
}
export interface Api {
setBaseUrl(newUrl: string) : void;
getMovies(req: GetMoviesRequest): Promise<Array<GetMoviesResponseOK>>;
// ... all operations listed here
}

type PlatformaticFrontendClient = Omit<Api, 'setBaseUrl'>
export default function build(url: string): PlatformaticFrontendClient

The javascript implementation will look like this

let baseUrl = ''
/** @type {import('./api-types.d.ts').Api['setBaseUrl']} */
export const setBaseUrl = (newUrl) => { baseUrl = newUrl }

/** @type {import('./api-types.d.ts').Api['getMovies']} */
export const getMovies = async (request) => {
return await _getMovies(baseUrl, request)
}
async function _createMovie (url, request) {
const response = await fetch(`${url}/movies/`, {
method:'post',
body: JSON.stringify(request),
headers: {
'Content-Type': 'application/json'
}
})

if (!response.ok) {
throw new Error(await response.text())
}

return await response.json()
}

/** @type {import('./api-types.d.ts').Api['createMovie']} */
export const createMovie = async (request) => {
return await _createMovie(baseUrl, request)
}
// ...

export default function build (url) {
return {
getMovies: _getMovies.bind(url, ...arguments),
// ...
}
}

The typescript implementation will look like this

import type { Api } from './api-types'
import * as Types from './api-types'

let baseUrl = ''
export const setBaseUrl = (newUrl: string) : void => { baseUrl = newUrl }

const _getMovies = async (url: string, request: Types.GetMoviesRequest) => {
const response = await fetch(`${url}/movies/?${new URLSearchParams(Object.entries(request || {})).toString()}`)

if (!response.ok) {
throw new Error(await response.text())
}

return await response.json()
}

export const getMovies: Api['getMovies'] = async (request: Types.GetMoviesRequest) => {
return await _getMovies(baseUrl, request)
}
// ...
export default function build (url) {
return {
getMovies: _getMovies.bind(url, ...arguments),
// ...
}
}
+ + + + \ No newline at end of file diff --git a/docs/0.42.0/reference/client/introduction/index.html b/docs/0.42.0/reference/client/introduction/index.html new file mode 100644 index 00000000000..4d7eb8544a5 --- /dev/null +++ b/docs/0.42.0/reference/client/introduction/index.html @@ -0,0 +1,34 @@ + + + + + +Platformatic Client | Platformatic Open Source Software + + + + + +
+
Version: 0.42.0

Platformatic Client

Create a Fastify plugin that exposes a client for a remote OpenAPI or GraphQL API.

To create a client for a remote OpenAPI API, you can use the following command:

$ platformatic client http://example.com/to/schema/file --name myclient

To create a client for a remote GraphQL API, you can use the following command:

$ platformatic client http://example.com/graphql --name myclient

Usage with Platformatic Service or Platformatic DB

If you run the generator in a Platformatic application, it will automatically extend it to load your client by editing the configuration file and adding a clients section. Then, in any part of your Platformatic application you can use the client.

You can use the client in your application in Javascript, calling a GraphQL endpoint:

// Use a typescript reference to set up autocompletion
// and explore the generated APIs.

/// <reference path="./myclient" />

/** @type {import('fastify').FastifyPluginAsync<{} */
module.exports = async function (app, opts) {
app.post('/', async (request, reply) => {
const res = await app.myclient.graphql({
query: 'query { movies { title } }'
})
return res
})
}

or in Typescript, calling an OpenAPI endpoint:

import { FastifyInstance } from 'fastify'
/// <reference path="./myclient" />

export default async function (app: FastifyInstance) {
app.get('/', async () => {
return app.myclient.get({})
})
}

The client configuration in the platformatic.db.json and platformatic.service.json would look like:

{
"clients": [{
"schema": "./myclient/myclient.openapi.json" // or ./myclient/myclient.schema.graphql
"name": "myclient",
"type": "openapi" // or graphql
"url": "{ PLT_MYCLIENT_URL }"
}]
}

Note that the generator would also have updated the .env and .env.sample files if they exist.

Generating a client for a service running within Platformatic Runtime

Platformatic Runtime allows you to create a network of services that are not exposed. To create a client to invoke one of those services from another, run:

$ platformatic client --name <clientname> --runtime <serviceId>

Where <clientname> is the name of the client and <serviceId> is the id of the given service (which corresponds, in the basic case, to the folder name of that service). The client generated is identical to the one in the previous section.

Note that this command looks for a platformatic.runtime.json in a parent directory.

Example

As an example, consider a network of three microservices:

  • somber-chariot, an instance of Platformatic DB;
  • languid-noblemen, an instance of Platformatic Service;
  • pricey-paesant, an instance of Platformatic Composer, which is also the runtime entrypoint.

From within the languid-noblemen folder, we can run:

$ platformatic client --name chariot --runtime somber-chariot

The client configuration in the platformatic.db.json and platformatic.service.json would look like:

{
"clients": [{
"path": "./chariot",
"serviceId": "somber-chariot"
}]
}

Even if the client is generated from an HTTP endpoint, it is possible to add a serviceId property to each client object shown above. This is not required, but if using the Platformatic Runtime, the serviceId property will be used to identify the service dependency.

Types Generator

The types for the client are automatically generated for both OpenAPI and GraphQL schemas.

You can generate only the types with the --types-only flag.

For example

$ platformatic client http://example.com/to/schema/file --name myclient --types-only

Will create the single myclient.d.ts file in current directory

OpenAPI

We provide a fully typed experience for OpenAPI, typing both the request and response for each individual OpenAPI operation.

Consider this example:

// Omitting all the individual Request and Response payloads for brevity

interface Client {
getMovies(req: GetMoviesRequest): Promise<Array<GetMoviesResponse>>;
createMovie(req: CreateMovieRequest): Promise<CreateMovieResponse>;
updateMovies(req: UpdateMoviesRequest): Promise<Array<UpdateMoviesResponse>>;
getMovieById(req: GetMovieByIdRequest): Promise<GetMovieByIdResponse>;
updateMovie(req: UpdateMovieRequest): Promise<UpdateMovieResponse>;
updateMovie(req: UpdateMovieRequest): Promise<UpdateMovieResponse>;
deleteMovies(req: DeleteMoviesRequest): Promise<DeleteMoviesResponse>;
getQuotesForMovie(req: GetQuotesForMovieRequest): Promise<Array<GetQuotesForMovieResponse>>;
getQuotes(req: GetQuotesRequest): Promise<Array<GetQuotesResponse>>;
createQuote(req: CreateQuoteRequest): Promise<CreateQuoteResponse>;
updateQuotes(req: UpdateQuotesRequest): Promise<Array<UpdateQuotesResponse>>;
getQuoteById(req: GetQuoteByIdRequest): Promise<GetQuoteByIdResponse>;
updateQuote(req: UpdateQuoteRequest): Promise<UpdateQuoteResponse>;
updateQuote(req: UpdateQuoteRequest): Promise<UpdateQuoteResponse>;
deleteQuotes(req: DeleteQuotesRequest): Promise<DeleteQuotesResponse>;
getMovieForQuote(req: GetMovieForQuoteRequest): Promise<GetMovieForQuoteResponse>;
}

type ClientPlugin = FastifyPluginAsync<NonNullable<client.ClientOptions>>

declare module 'fastify' {
interface FastifyInstance {
'client': Client;
}

interface FastifyRequest {
'client': Client;
}
}

declare namespace Client {
export interface ClientOptions {
url: string
}
export const client: ClientPlugin;
export { client as default };
}

declare function client(...params: Parameters<ClientPlugin>): ReturnType<ClientPlugin>;
export = client;

GraphQL

We provide a partially typed experience for GraphQL, because we do not want to limit +how you are going to query the remote system. Take a look at this example:

declare module 'fastify' {
interface GraphQLQueryOptions {
query: string;
headers: Record<string, string>;
variables: Record<string, unknown>;
}
interface GraphQLClient {
graphql<T>(GraphQLQuery): PromiseLike<T>;
}
interface FastifyInstance {
'client'
: GraphQLClient;

}

interface FastifyRequest {
'client'<T>(GraphQLQuery): PromiseLike<T>;
}
}

declare namespace client {
export interface ClientOptions {
url: string
}
export interface Movie {
'id'?: string;

'title'?: string;

'realeasedDate'?: string;

'createdAt'?: string;

'preferred'?: string;

'quotes'?: Array<Quote>;

}
export interface Quote {
'id'?: string;

'quote'?: string;

'likes'?: number;

'dislikes'?: number;

'movie'?: Movie;

}
export interface MoviesCount {
'total'?: number;

}
export interface QuotesCount {
'total'?: number;

}
export interface MovieDeleted {
'id'?: string;

}
export interface QuoteDeleted {
'id'?: string;

}
export const client: ClientPlugin;
export { client as default };
}

declare function client(...params: Parameters<ClientPlugin>): ReturnType<ClientPlugin>;
export = client;

Since only you can know what GraphQL query you are producing, you are responsible for typing it accordingly.

Usage with standalone Fastify

If a Platformatic configuration file is not found, a complete Fastify plugin is generated to be used in your Fastify application like so:

const fastify = require('fastify')()
const client = require('./your-client-name')

fastify.register(client, {
url: 'http://example.com'
})

// GraphQL
fastify.post('/', async (request, reply) => {
const res = await request.movies.graphql({
query: 'mutation { saveMovie(input: { title: "foo" }) { id, title } }'
})
return res
})

// OpenAPI
fastify.post('/', async (request, reply) => {
const res = await request.movies.createMovie({ title: 'foo' })
return res
})

fastify.listen({ port: 3000 })

Note that you would need to install @platformatic/client as a dependency.

How are the method names defined in OpenAPI

The names of the operations are defined in the OpenAPI specification. Specifically, we use the operationId. If that's not part of the spec, the name is generated by combining the parts of the path: for example, for /something/{param1}/ with a GET method, it generates getSomethingParam1.

Authentication

It's very common that downstream services require some form of authentication. How can we add the necessary headers? You can configure them from your plugin:

/// <reference path="./myclient" />

/** @type {import('fastify').FastifyPluginAsync<{} */
module.exports = async function (app, opts) {
app.configureMyclient({
async getHeaders (req, reply) {
return {
'foo': 'bar'
}
}
})

app.post('/', async (request, reply) => {
const res = await app.myclient.graphql({
query: 'query { movies { title } }'
})
return res
})
}

Telemetry propagation

To correctly propagate telemetry information, be sure to get the client from the request object, e.g.:

fastify.post('/', async (request, reply) => {
const res = await request.movies.createMovie({ title: 'foo' })
return res
})
+ + + + \ No newline at end of file diff --git a/docs/0.42.0/reference/client/programmatic/index.html b/docs/0.42.0/reference/client/programmatic/index.html new file mode 100644 index 00000000000..de6b514a68d --- /dev/null +++ b/docs/0.42.0/reference/client/programmatic/index.html @@ -0,0 +1,17 @@ + + + + + +Programmatic API | Platformatic Open Source Software + + + + + +
+
Version: 0.42.0

Programmatic API

It is possible to use the Platformatic client without the generator.

OpenAPI Client

import { buildOpenAPIClient } from '@platformatic/client'

const client = await buildOpenAPIClient({
url: `https://yourapi.com/documentation/json`,
// path: 'path/to/openapi.json',
headers: {
'foo': 'bar'
}
})

const res = await client.yourOperationName({ foo: 'bar' })

console.log(res)

If you use Typescript you can take advantage of the generated types file

import { buildOpenAPIClient } from '@platformatic/client'
import Client from './client'
//
// interface Client {
// getMovies(req: GetMoviesRequest): Promise<Array<GetMoviesResponse>>;
// createMovie(req: CreateMovieRequest): Promise<CreateMovieResponse>;
// ...
// }
//

const client: Client = await buildOpenAPIClient<Client>({
url: `https://yourapi.com/documentation/json`,
// path: 'path/to/openapi.json',
headers: {
'foo': 'bar'
}
})

const res = await client.getMovies()
console.log(res)

GraphQL Client

import { buildGraphQLClient } from '@platformatic/client'

const client = await buildGraphQLClient({
url: `https://yourapi.com/graphql`,
headers: {
'foo': 'bar'
}
})

const res = await client.graphql({
query: `
mutation createMovie($title: String!) {
saveMovie(input: {title: $title}) {
id
title
}
}
`,
variables: {
title: 'The Matrix'
}
})

console.log(res)
+ + + + \ No newline at end of file diff --git a/docs/0.42.0/reference/composer/api-modification/index.html b/docs/0.42.0/reference/composer/api-modification/index.html new file mode 100644 index 00000000000..d96a2310b17 --- /dev/null +++ b/docs/0.42.0/reference/composer/api-modification/index.html @@ -0,0 +1,19 @@ + + + + + +API modification | Platformatic Open Source Software + + + + + +
+
Version: 0.42.0

API modification

If you want to modify automatically generated API, you can use composer custom onRoute hook.

addComposerOnRouteHook(openApiPath, methods, handler)

  • openApiPath (string) - A route OpenAPI path that Platformatic Composer takes from the OpenAPI specification.
  • methods (string[]) - Route HTTP methods that Platformatic Composer takes from the OpenAPI specification.
  • handler (function) - fastify onRoute hook handler.

onComposerResponse

The onComposerResponse hook is called after the response is received from a composed service. It might be useful if you want to modify the response before it is sent to the client. If you want to use it, you need to add an onComposerResponse property to the config object of the route options.

  • request (object) - fastify request object.
  • reply (object) - fastify reply object.
  • body (object) - undici response body object.

Example

app.platformatic.addComposerOnRouteHook('/users/{id}', ['GET'], routeOptions => {
routeOptions.schema.response[200] = {
type: 'object',
properties: {
firstName: { type: 'string' },
lastName: { type: 'string' }
}
}

async function onComposerResponse (request, reply, body) {
const payload = await body.json()
const newPayload = {
firstName: payload.first_name,
lastName: payload.last_name
}
reply.send(newPayload)
}
routeOptions.config.onComposerResponse = onComposerResponse
})
+ + + + \ No newline at end of file diff --git a/docs/0.42.0/reference/composer/configuration/index.html b/docs/0.42.0/reference/composer/configuration/index.html new file mode 100644 index 00000000000..05be9693193 --- /dev/null +++ b/docs/0.42.0/reference/composer/configuration/index.html @@ -0,0 +1,38 @@ + + + + + +Configuration | Platformatic Open Source Software + + + + + +
+
Version: 0.42.0

Configuration

Platformatic Composer is configured with a configuration file. It supports the use of environment variables as setting values with configuration placeholders.

Configuration file

If the Platformatic CLI finds a file in the current working directory matching +one of these filenames, it will automatically load it:

  • platformatic.composer.json
  • platformatic.composer.json5
  • platformatic.composer.yml or platformatic.composer.yaml
  • platformatic.composer.tml or platformatic.composer.toml

Alternatively, a --config option with a configuration +filepath can be passed to most platformatic composer CLI commands.

The configuration examples in this reference use JSON.

Supported formats

FormatExtensions
JSON.json
JSON5.json5
YAML.yml, .yaml
TOML.tml

Comments are supported by the JSON5, YAML and TOML file formats.

Settings

Configuration settings are organised into the following groups:

Sensitive configuration settings containing sensitive data should be set using configuration placeholders.

server

A required object with the following settings:

  • hostname (required, string) — Hostname where Platformatic Composer server will listen for connections.

  • port (required, number) — Port where Platformatic Composer server will listen for connections.

  • healthCheck (boolean or object) — Enables the health check endpoint.

    • Powered by @fastify/under-pressure.
    • The value can be an object, used to specify the interval between checks in milliseconds (default: 5000)

    Example

    {
    "server": {
    ...
    "healthCheck": {
    "interval": 2000
    }
    }
    }
  • cors (object) — Configuration for Cross-Origin Resource Sharing (CORS) headers.

    • All options will be passed to the @fastify/cors plugin. In order to specify a RegExp object, you can pass { regexp: 'yourregexp' }, +it will be automatically converted.
  • logger (object) -- the logger configuration.

  • pluginTimeout (integer) -- the number of milliseconds to wait for a Fastify plugin to load, see the fastify docs for more details.

  • https (object) - Configuration for HTTPS supporting the following options.

    • key (required, string, object, or array) - If key is a string, it specifies the private key to be used. If key is an object, it must have a path property specifying the private key file. Multiple keys are supported by passing an array of keys.
    • cert (required, string, object, or array) - If cert is a string, it specifies the certificate to be used. If cert is an object, it must have a path property specifying the certificate file. Multiple certificates are supported by passing an array of keys.

metrics

Configuration for a Prometheus server that will export monitoring metrics +for the current server instance. It uses fastify-metrics +under the hood.

This setting can be a boolean or an object. If set to true the Prometheus server will listen on http://0.0.0.0:9090.

Supported object properties:

  • hostname (string) — The hostname where Prometheus server will listen for connections.
  • port (number) — The port where Prometheus server will listen for connections.
  • auth (object) — Basic Auth configuration. username and password are required here +(use environment variables).

plugins

An optional object that defines the plugins loaded by Platformatic Composer.

  • paths (required, array): an array of paths (string) +or an array of objects composed as follows,

    • path (string): Relative path to plugin's entry point.
    • options (object): Optional plugin options.
    • encapsulate (boolean): if the path is a folder, it instruct Platformatic to not encapsulate those plugins.
    • maxDepth (integer): if the path is a folder, it limits the depth to load the content from.
  • typescript (boolean): enable typescript compilation. A tsconfig.json file is required in the same folder.

    Example

    {
    "plugins": {
    "paths": [{
    "path": "./my-plugin.js",
    "options": {
    "foo": "bar"
    }
    }]
    }
    }

watch

Disable watching for file changes if set to false. It can also be customized with the following options:

  • ignore (string[], default: null): List of glob patterns to ignore when watching for changes. If null or not specified, ignore rule is not applied. Ignore option doesn't work for typescript files.

  • allow (string[], default: ['*.js', '**/*.js']): List of glob patterns to allow when watching for changes. If null or not specified, allow rule is not applied. Allow option doesn't work for typescript files.

    Example

    {
    "watch": {
    "ignore": ["*.mjs", "**/*.mjs"],
    "allow": ["my-plugin.js", "plugins/*.js"]
    }
    }

composer

Configure @platformatic/composer specific settings such as services or refreshTimeout:

  • services (array, default: []) — is an array of objects that defines +the services managed by the composer. Each service object supports the following settings:

    • id (required, string) - A unique identifier for the service.
    • origin (string) - A service origin. Skip this option if the service is executing inside of Platformatic Runtime. In this case, service id will be used instead of origin.
    • openapi (required, object) - The configuration file used to compose OpenAPI specification. See the openapi for details.
    • proxy (object or false) - Service proxy configuration. If false, the service proxy is disabled.
      • prefix (required, string) - Service proxy prefix. All service routes will be prefixed with this value.
    • refreshTimeout (number) - The number of milliseconds to wait between checks for changes in the service OpenAPI specification. If not specified, the default value is 1000.

openapi

  • url (string) - A path of the route that exposes the OpenAPI specification. If a service is a Platformatic Service or Platformatic DB, use /documentation/json as a value. Use this or file option to specify the OpenAPI specification.
  • file (string) - A path to the OpenAPI specification file. Use this or url option to specify the OpenAPI specification.
  • prefix (string) - A prefix for the OpenAPI specification. All service routes will be prefixed with this value.
  • config (string) - A path to the OpenAPI configuration file. This file is used to customize the OpenAPI specification. See the openapi-configuration for details.
openapi-configuration

The OpenAPI configuration file is a JSON file that is used to customize the OpenAPI specification. It supports the following options:

  • ignore (boolean) - If true, the route will be ignored by the composer. +If you want to ignore a specific method, use the ignore option in the nested method object.

    Example

    {
    "paths": {
    "/users": {
    "ignore": true
    },
    "/users/{id}": {
    "get": { "ignore": true },
    "put": { "ignore": true }
    }
    }
    }
  • alias (string) - Use it to create an alias for the route path. The original route path will be ignored.

    Example

    {
    "paths": {
    "/users": {
    "alias": "/customers"
    }
    }
    }
  • rename (string) - Use it to rename composed route response fields. +Use json schema format to describe the response structure. For now it works only for 200 response.

    Example

    {
    "paths": {
    "/users": {
    "responses": {
    "200": {
    "type": "array",
    "items": {
    "type": "object",
    "properties": {
    "id": { "rename": "user_id" },
    "name": { "rename": "first_name" }
    }
    }
    }
    }
    }
    }
    }

Examples

Composition of two remote services:

{
"composer": {
"services": [
{
"id": "auth-service",
"origin": "https://auth-service.com",
"openapi": {
"url": "/documentation/json",
"prefix": "auth"
}
},
{
"id": "payment-service",
"origin": "https://payment-service.com",
"openapi": {
"file": "./schemas/payment-service.json"
}
}
],
"refreshTimeout": 1000
}
}

Composition of two local services inside of Platformatic Runtime:

{
"composer": {
"services": [
{
"id": "auth-service",
"openapi": {
"url": "/documentation/json",
"prefix": "auth"
}
},
{
"id": "payment-service",
"openapi": {
"file": "./schemas/payment-service.json"
}
}
],
"refreshTimeout": 1000
}
}

telemetry

Open Telemetry is optionally supported with these settings:

  • serviceName (required, string) — Name of the service as will be reported in open telemetry.
  • version (string) — Optional version (free form)
  • skip (array). Optional list of operations to skip when exporting telemetry defined object with properties:
    • method: GET, POST, PUT, DELETE, PATCH, HEAD, OPTIONS, TRACE
    • path. e.g.: /documentation/json
  • exporter (object or array) — Exporter configuration. If not defined, the exporter defaults to console. If an array of objects is configured, every object must be a valid exporter object. The exporter object has the following properties:
    • type (string) — Exporter type. Supported values are console, otlp, zipkin and memory (default: console). memory is only supported for testing purposes.
    • options (object) — These options are supported:
      • url (string) — The URL to send the telemetry to. Required for otlp exporter. This has no effect on console and memory exporters.
      • headers (object) — Optional headers to send with the telemetry. This has no effect on console and memory exporters.

Note that OTLP traces can be consumed by different solutions, like Jaeger. Here the full list.

Example

{
"telemetry": {
"serviceName": "test-service",
"exporter": {
"type": "otlp",
"options": {
"url": "http://localhost:4318/v1/traces"
}
}
}
}

Environment variable placeholders

The value for any configuration setting can be replaced with an environment variable +by adding a placeholder in the configuration file, for example {PLT_SERVER_LOGGER_LEVEL}.

All placeholders in a configuration must be available as an environment variable +and must meet the allowed placeholder name rules.

Example

platformatic.service.json
{
"server": {
"port": "{PORT}"
}
}

Platformatic will replace the placeholders in this example with the environment +variables of the same name.

Setting environment variables

If a .env file exists it will automatically be loaded by Platformatic using +dotenv. For example:

.env
PLT_SERVER_LOGGER_LEVEL=info
PORT=8080

The .env file must be located in the same folder as the Platformatic configuration +file or in the current working directory.

Environment variables can also be set directly on the command line, for example:

PLT_SERVER_LOGGER_LEVEL=debug npx platformatic composer

Allowed placeholder names

Only placeholder names prefixed with PLT_, or that are in this allow list, will be +dynamically replaced in the configuration file:

  • PORT

This restriction is to avoid accidentally exposing system environment variables. +An error will be raised by Platformatic if it finds a configuration placeholder +that isn't allowed.

The default allow list can be extended by passing a --allow-env CLI option with a +comma separated list of strings, for example:

npx platformatic composer --allow-env=HOST,SERVER_LOGGER_LEVEL

If --allow-env is passed as an option to the CLI, it will be merged with the +default allow list.

+ + + + \ No newline at end of file diff --git a/docs/0.42.0/reference/composer/introduction/index.html b/docs/0.42.0/reference/composer/introduction/index.html new file mode 100644 index 00000000000..65a4c99a6e2 --- /dev/null +++ b/docs/0.42.0/reference/composer/introduction/index.html @@ -0,0 +1,22 @@ + + + + + +Platformatic Composer | Platformatic Open Source Software + + + + + +
+
Version: 0.42.0

Platformatic Composer

Platformatic Composer is an HTTP server that automatically aggregates multiple +services APIs into a single API.

info

Platformatic Composer is currently in public beta.

Features

Public beta

Platformatic Composer is in public beta. You can use it in production, but it's quite +likely that you'll encounter significant bugs.

If you run into a bug or have a suggestion for improvement, please +raise an issue on GitHub.

Standalone usage

If you're only interested in the features available in Platformatic Composer, you can replace platformatic with @platformatic/composer in the dependencies of your package.json, so that you'll import fewer deps.

Example configuration file

The following configuration file can be used to start a new Platformatic +Composer project. For more details on the configuration file, see the +configuration documentation.

{
"$schema": "https://platformatic.dev/schemas/v0.26.0/composer",
"server": {
"hostname": "127.0.0.1",
"port": 0,
"logger": {
"level": "info"
}
},
"composer": {
"services": [
{
"id": "auth-service",
"origin": "https://auth-service.com",
"openapi": {
"url": "/documentation/json",
"prefix": "auth"
}
},
{
"id": "payment-service",
"origin": "https://payment-service.com",
"openapi": {
"url": "/documentation/json"
}
}
],
"refreshTimeout": 1000
},
"watch": true
}
+ + + + \ No newline at end of file diff --git a/docs/0.42.0/reference/composer/plugin/index.html b/docs/0.42.0/reference/composer/plugin/index.html new file mode 100644 index 00000000000..e65e36e7967 --- /dev/null +++ b/docs/0.42.0/reference/composer/plugin/index.html @@ -0,0 +1,18 @@ + + + + + +Plugin | Platformatic Open Source Software + + + + + +
+
Version: 0.42.0

Plugin

If you want to add features to a service, you will need to register a plugin, which will be in the form of a standard Fastify plugin.

The config file will specify where the plugin file is located as the example below:

{
...
"plugins": {
"paths": ["./plugin/index.js"]
}
}

The path is relative to the config file path.

You should export an async function which receives the following parameters:

  • app (FastifyInstance) that is the main fastify instance
  • opts all the options specified in the config file after path

Hot Reload

Plugin file is being watched by fs.watch function.

You don't need to reload Platformatic Composer server while working on your plugin. Every time you save, the watcher will trigger a reload event and the server will auto-restart and load your updated code.

tip

At this time, on Linux, file watch in subdirectories is not supported due to a Node.js limitation (documented here).

Directories

The path can also be a directory. In that case, the directory will be loaded with @fastify/autoload.

Consider the following directory structure:

├── routes
│ ├── foo
│ │ ├── something.js
│ │ └── bar
│ │ └── baz.js
│ ├── single-plugin
│ │ └── utils.js
│ └── another-plugin.js
└── platformatic.composer.json

By default the folder will be added as a prefix to all the routes defined within them. +See the autoload documentation for all the options to customize this behavior.

Multiple plugins

Multiple plugins can be loaded in parallel by specifying an array:

{
...
"plugins": {
"paths": [{
"path": "./plugin/index.js"
}, {
"path": "./routes/"
}]
}
}
+ + + + \ No newline at end of file diff --git a/docs/0.42.0/reference/composer/programmatic/index.html b/docs/0.42.0/reference/composer/programmatic/index.html new file mode 100644 index 00000000000..844728a88e3 --- /dev/null +++ b/docs/0.42.0/reference/composer/programmatic/index.html @@ -0,0 +1,18 @@ + + + + + +Programmatic API | Platformatic Open Source Software + + + + + +
+
Version: 0.42.0

Programmatic API

In many cases it's useful to start Platformatic Composer using an API instead of +command line, e.g. in tests we want to start and stop our server.

The buildServer function allows that:

import { buildServer } from '@platformatic/composer'

const app = await buildServer('path/to/platformatic.composer.json')
await app.start()

const res = await fetch(app.url)
console.log(await res.json())

// do something

await app.close()

It is also possible to customize the configuration:

import { buildServer } from '@platformatic/composer'

const app = await buildServer({
server: {
hostname: '127.0.0.1',
port: 0
},
services: [
{
id: 'auth-service',
origin: 'https://auth-service.com',
openapi: {
url: '/documentation/json',
prefix: 'auth'
}
},
{
id: 'payment-service',
origin: 'https://payment-service.com',
openapi: {
file: './schemas/payment-service.json'
}
}
]
})

await app.start()

const res = await fetch(app.url)
console.log(await res.json())

// do something

await app.close()
+ + + + \ No newline at end of file diff --git a/docs/0.42.0/reference/db/authorization/introduction/index.html b/docs/0.42.0/reference/db/authorization/introduction/index.html new file mode 100644 index 00000000000..60973d024c4 --- /dev/null +++ b/docs/0.42.0/reference/db/authorization/introduction/index.html @@ -0,0 +1,21 @@ + + + + + +Authorization | Platformatic Open Source Software + + + + + +
+
Version: 0.42.0

Authorization

Introduction

Authorization in Platformatic DB is role-based. User authentication and the +assignment of roles must be handled by an external authentication service.

Configuration

Authorization strategies and rules are configured via a Platformatic DB +configuration file. See the Platformatic DB Configuration +documentation for the supported settings.

Bypass authorization in development

To make testing and developing easier, it's possible to bypass authorization checks +if an adminSecret is set. See the HTTP headers (development only) documentation.

+ + + + \ No newline at end of file diff --git a/docs/0.42.0/reference/db/authorization/rules/index.html b/docs/0.42.0/reference/db/authorization/rules/index.html new file mode 100644 index 00000000000..a847d9883a2 --- /dev/null +++ b/docs/0.42.0/reference/db/authorization/rules/index.html @@ -0,0 +1,28 @@ + + + + + +Rules | Platformatic Open Source Software + + + + + +
+
Version: 0.42.0

Rules

Introduction

Authorization rules can be defined to control what operations users are +able to execute via the REST or GraphQL APIs that are exposed by a Platformatic +DB app.

Every rule must specify:

  • role (required) — A role name. It's a string and must match with the role(s) set by an external authentication service.
  • entity (optional) — The Platformatic DB entity to apply this rule to.
  • entities (optional) — The Platformatic DB entities to apply this rule to.
  • defaults (optional) — Configure entity fields that will be +automatically set from user data.
  • One entry for each supported CRUD operation: find, save, delete

One of entity and entities must be specified.

Operation checks

Every entity operation — such as find, insert, save or delete — can have +authorization checks specified for them. This value can be false (operation disabled) +or true (operation enabled with no checks).

To specify more fine-grained authorization controls, add a checks field, e.g.:

{
"role": "user",
"entity": "page",
"find": {
"checks": {
"userId": "X-PLATFORMATIC-USER-ID"
}
},
...
}

In this example, when a user with a user role executes a findPage, they can +access all the data that has userId equal to the value in user metadata with +key X-PLATFORMATIC-USER-ID.

Note that "userId": "X-PLATFORMATIC-USER-ID" is syntactic sugar for:

      "find": {
"checks": {
"userId": {
"eq": "X-PLATFORMATIC-USER-ID"
}
}
}

It's possible to specify more complex rules using all the supported where clause operators.

Note that userId MUST exist as a field in the database table to use this feature.

GraphQL events and subscriptions

Platformatic DB supports GraphQL subscriptions and therefore db-authorization must protect them. +The check is performed based on the find permissions, the only permissions that are supported are:

  1. find: false, the subscription for that role is disabled
  2. find: { checks: { [prop]: 'X-PLATFORMATIC-PROP' } } validates that the given prop is equal
  3. find: { checks: { [prop]: { eq: 'X-PLATFORMATIC-PROP' } } } validates that the given prop is equal

Conflicting rules across roles for different equality checks will not be supported.

Restrict access to entity fields

If a fields array is present on an operation, Platformatic DB restricts the columns on which the user can execute to that list. +For save operations, the configuration must specify all the not-nullable fields (otherwise, it would fail at runtime). +Platformatic does these checks at startup.

Example:

    "rule": {
"entity": "page",
"role": "user",
"find": {
"checks": {
"userId": "X-PLATFORMATIC-USER-ID"
},
"fields": ["id", "title"]
}
...
}

In this case, only id and title are returned for a user with a user role on the page entity.

Set entity fields from user metadata

Defaults are used in database inserts; these default fields are automatically populated from user metadata, e.g.:

        "defaults": {
"userId": "X-PLATFORMATIC-USER-ID"
},

When an entity is created, the userId column is used and populated using the value from user metadata.

Programmatic rules

If it's necessary to have more control over the authorizations, it's possible to specify the rules programmatically, e.g.:


app.register(auth, {
jwt: {
secret: 'supersecret'
},
rules: [{
role: 'user',
entity: 'page',
async find ({ user, ctx, where }) {
return {
...where,
userId: {
eq: user['X-PLATFORMATIC-USER-ID']
}
}
},
async delete ({ user, ctx, where }) {
return {
...where,
userId: {
eq: user['X-PLATFORMATIC-USER-ID']
}
}
},
defaults: {
userId: async function ({ user, ctx, input }) {
match(user, {
'X-PLATFORMATIC-USER-ID': generated.shift(),
'X-PLATFORMATIC-ROLE': 'user'
})
return user['X-PLATFORMATIC-USER-ID']
}

},
async save ({ user, ctx, where }) {
return {
...where,
userId: {
eq: user['X-PLATFORMATIC-USER-ID']
}
}
}
}]
})

In this example, the user role can delete all the posts edited before yesterday:

 app.register(auth, {
jwt: {
secret: 'supersecret'
},
roleKey: 'X-PLATFORMATIC-ROLE',
anonymousRole: 'anonymous',
rules: [{
role: 'user',
entity: 'page',
find: true,
save: true,
async delete ({ user, ctx, where }) {
return {
...where,
editedAt: {
lt: yesterday
}
}
},
defaults: {
userId: 'X-PLATFORMATIC-USER-ID'
}
}]
})

Access validation on entity mapper for plugins

To assert that a specific user with its role(s) has the correct access rights to use entities in a Platformatic plugin, the context should be passed to the entity mapper in order to verify its permissions, like this:

//plugin.js

app.post('/', async (req, reply) => {
const ctx = req.createPlatformaticCtx()

await app.platformatic.entities.movie.find({
where: { /*...*/ },
ctx
})
})

Skip authorization rules

In custom plugins, it's possible to skip the authorization rules on entities programmatically by setting the skipAuth flag to true or not passing a ctx, e.g.:

// this works even if the user's role doesn't have the `find` permission.
const result = await app.platformatic.entities.page.find({skipAuth: true, ...})

This has the same effect:

// this works even if the user's role doesn't have the `find` permission
const result = await app.platformatic.entities.page.find() // no `ctx`

This is useful for custom plugins for which the authentication is not necessary, so there is no user role set when invoked.

info

Skip authorization rules is not possible on the automatically generated REST and GraphQL APIs.

Avoid repetition of the same rule multiple times

Very often we end up writing the same rules over and over again. +Instead, it's possible to condense the rule for multiple entities on a single entry:

 app.register(auth, {
jwt: {
secret: 'supersecret'
},
roleKey: 'X-PLATFORMATIC-ROLE',
anonymousRole: 'anonymous',
rules: [{
role: 'anonymous',
entities: ['category', 'page'],
find: true,
delete: false,
save: false
}]
})
+ + + + \ No newline at end of file diff --git a/docs/0.42.0/reference/db/authorization/strategies/index.html b/docs/0.42.0/reference/db/authorization/strategies/index.html new file mode 100644 index 00000000000..ca1345f3f08 --- /dev/null +++ b/docs/0.42.0/reference/db/authorization/strategies/index.html @@ -0,0 +1,40 @@ + + + + + +Strategies | Platformatic Open Source Software + + + + + +
+
Version: 0.42.0

Strategies

Introduction

Platformatic DB supports the following authorization strategies:

JSON Web Token (JWT)

The JSON Web Token (JWT) authorization strategy is built on top +of the @fastify/jwt Fastify plugin.

Platformatic DB JWT integration

To configure it, the quickest way is to pass a shared secret in your +Platformatic DB configuration file, for example:

platformatic.db.json
{
"authorization": {
"jwt": {
"secret": "<shared-secret>"
}
}
}

By default @fastify/jwt looks for a JWT in an HTTP request's Authorization +header. This requires HTTP requests to the Platformatic DB API to include an +Authorization header like this:

Authorization: Bearer <token>

See the @fastify/jwt documentation +for all of the available configuration options.

JSON Web Key Sets (JWKS)

The JWT authorization strategy includes support for JSON Web Key Sets.

To configure it:

platformatic.db.json
{
"authorization": {
"jwt": {
"jwks": {
"allowedDomains": [
"https://ISSUER_DOMAIN"
]
}
}
}
}

When a JSON Web Token is included in a request to Platformatic DB, it retrieves the +correct public key from https://ISSUER_DOMAIN/.well-known/jwks.json and uses it to +verify the JWT signature. The token carries all the information, like the kid, +which is the key id used to sign the token itself, so no other configuration is required.

JWKS can be enabled without any options:

platformatic.db.json
{
"authorization": {
"jwt": {
"jwks": true
}
}
}

When configured like this, the JWK URL is calculated from the iss (issuer) field of JWT, so +every JWT token from an issuer that exposes a valid JWKS endpoint will pass the validation. +This configuration should only be used in development, while +in every other case the allowedDomains option should be specified.

Any option supported by the get-jwks +library can be specified in the authorization.jwt.jwks object.

JWT Custom Claim Namespace

JWT claims can be namespaced to avoid name collisions. If so, we will receive tokens +with custom claims such as: https://platformatic.dev/X-PLATFORMATIC-ROLE +(where https://platformatic.dev/ is the namespace). +If we want to map these claims to user metadata removing our namespace, we can +specify the namespace in the JWT options:

platformatic.db.json
{
"authorization": {
"jwt": {
"namespace": "https://platformatic.dev/"
}
}
}

With this configuration, the https://platformatic.dev/X-PLATFORMATIC-ROLE claim +is mapped to X-PLATFORMATIC-ROLE user metadata.

Webhook

Platformatic DB can use a webhook to authenticate requests.

Platformatic DB Webhook integration

In this case, the URL is configured on authorization:

platformatic.db.json
{
"authorization": {
"webhook": {
"url": "<webhook url>"
}
}
}

When a request is received, Platformatic sends a POST to the webhook, replicating +the same body and headers, except for:

  • host
  • connection

In the Webhook case, the HTTP response contains the roles/user information as HTTP headers.

HTTP headers (development only)

danger

Passing an admin API key via HTTP headers is highly insecure and should only be used +during development or within protected networks.

If a request has X-PLATFORMATIC-ADMIN-SECRET HTTP header set with a valid adminSecret +(see configuration reference) the +role is set automatically as platformatic-admin, unless a different role is set for +user impersonation (which is disabled if JWT or Webhook are set, see below).

Platformatic DB HTTP Headers

Also, the following rule is automatically added to every entity, allowing the user +that presented the adminSecret to perform any operation on any entity:

{
"role": "platformatic-admin",
"find": false,
"delete": false,
"save": false
}
+ + + + \ No newline at end of file diff --git a/docs/0.42.0/reference/db/authorization/user-roles-metadata/index.html b/docs/0.42.0/reference/db/authorization/user-roles-metadata/index.html new file mode 100644 index 00000000000..b72614569fa --- /dev/null +++ b/docs/0.42.0/reference/db/authorization/user-roles-metadata/index.html @@ -0,0 +1,31 @@ + + + + + +User Roles & Metadata | Platformatic Open Source Software + + + + + +
+
Version: 0.42.0

User Roles & Metadata

Introduction

Roles and user information are passed to Platformatic DB from an external +authentication service as a string (JWT claims or HTTP headers). We refer to +this data as user metadata.

Roles

Users can have a list of roles associated with them. These roles can be specified +in an X-PLATFORMATIC-ROLE property as a list of comma separated role names +(the key name is configurable).

Note that role names are just strings.

Reserved roles

Some special role names are reserved by Platformatic DB:

  • platformatic-admin : this identifies a user who has admin powers
  • anonymous: set automatically when no roles are associated

Anonymous role

If a user has no role, the anonymous role is assigned automatically. It's possible +to specify rules to apply to users with this role:

    {
"role": "anonymous",
"entity": "page",
"find": false,
"delete": false,
"save": false
}

In this case, a user that has no role or explicitly has the anonymous role +cannot perform any operations on the page entity.

Role impersonation

If a request includes a valid X-PLATFORMATIC-ADMIN-SECRET HTTP header it is +possible to impersonate a user roles. The roles to impersonate can be specified +by sending a X-PLATFORMATIC-ROLE HTTP header containing a comma separated list +of roles.

note

When JWT or Webhook are set, user role impersonation is not enabled, and the role +is always set as platformatic-admin automatically if the X-PLATFORMATIC-ADMIN-SECRET +HTTP header is specified.

Role configuration

The roles key in user metadata defaults to X-PLATFORMATIC-ROLE. It's possible to change it using the roleKey field in configuration. Same for the anonymous role, which value can be changed using anonymousRole.

 "authorization": {
"roleKey": "X-MYCUSTOM-ROLE_KEY",
"anonymousRole": "anonym",
"rules": [
...
]
}

User metadata

User roles and other user data, such as userId, are referred to by Platformatic +DB as user metadata.

User metadata is parsed from an HTTP request and stored in a user object on the +Fastify request object. This object is populated on-demand, but it's possible +to populate it explicitly with await request.setupDBAuthorizationUser().

+ + + + \ No newline at end of file diff --git a/docs/0.42.0/reference/db/configuration/index.html b/docs/0.42.0/reference/db/configuration/index.html new file mode 100644 index 00000000000..5bd6ce64b3a --- /dev/null +++ b/docs/0.42.0/reference/db/configuration/index.html @@ -0,0 +1,59 @@ + + + + + +Configuration | Platformatic Open Source Software + + + + + +
+
Version: 0.42.0

Configuration

Platformatic DB is configured with a configuration file. It supports the use +of environment variables as setting values with configuration placeholders.

Configuration file

If the Platformatic CLI finds a file in the current working directory matching +one of these filenames, it will automatically load it:

  • platformatic.db.json
  • platformatic.db.json5
  • platformatic.db.yml or platformatic.db.yaml
  • platformatic.db.tml or platformatic.db.toml

Alternatively, a --config option with a configuration +filepath can be passed to most platformatic db CLI commands.

The configuration examples in this reference use JSON.

Supported formats

FormatExtensions
JSON.json
JSON5.json5
YAML.yml, .yaml
TOML.tml

Comments are supported by the JSON5, YAML and TOML file formats.

Settings

Configuration settings are organised into the following groups:

Sensitive configuration settings, such as a database connection URL that contains +a password, should be set using configuration placeholders.

db

A required object with the following settings:

  • connectionString (required, string) — Database connection URL.

    • Example: postgres://user:password@my-database:5432/db-name
  • schema (array of string) - Currently supported only for postgres, schemas used to look for entities. If not provided, the default public schema is used.

    Examples

  "db": {
"connectionString": "(...)",
"schema": [
"schema1", "schema2"
],
...

},

  • Platformatic DB supports MySQL, MariaDB, PostgreSQL and SQLite.

  • graphql (boolean or object, default: true) — Controls the GraphQL API interface, with optional GraphiQL UI.

    Examples

    Enables GraphQL support

    {
    "db": {
    ...
    "graphql": true
    }
    }

    Enables GraphQL support with GraphiQL

    {
    "db": {
    ...
    "graphql": {
    "graphiql": true
    }
    }
    }

    It's possible to selectively ignore entities:

    {
    "db": {
    ...
    "graphql": {
    "ignore": {
    "categories": true
    }
    }
    }
    }

    It's possible to selectively ignore fields:

    {
    "db": {
    ...
    "graphql": {
    "ignore": {
    "categories": {
    "name": true
    }
    }
    }
    }
    }

    It's possible to add a custom GraphQL schema during the startup:

    {
    "db": {
    ...
    "graphql": {
    "schemaPath": "path/to/schema.graphql"
    }
    }
    }
    }
  • openapi (boolean or object, default: true) — Enables OpenAPI REST support.

    • If value is an object, all OpenAPI v3 allowed properties can be passed. Also a prefix property can be passed to set the OpenAPI prefix.
    • Platformatic DB uses @fastify/swagger under the hood to manage this configuration.

    Examples

    Enables OpenAPI

    {
    "db": {
    ...
    "openapi": true
    }
    }

    Enables OpenAPI with prefix

    {
    "db": {
    ...
    "openapi": {
    "prefix": "/api"
    }
    }
    }

    Enables OpenAPI with options

    {
    "db": {
    ...
    "openapi": {
    "info": {
    "title": "Platformatic DB",
    "description": "Exposing a SQL database as REST"
    }
    }
    }
    }

    You can for example add the security section, so that Swagger will allow you to add the authentication header to your requests. +In the following code snippet, we're adding a Bearer token in the form of a JWT:

    {
    "db": {
    ...
    "openapi": {
    ...
    "security": [{ "bearerAuth": [] }],
    "components": {
    "securitySchemes": {
    "bearerAuth": {
    "type": "http",
    "scheme": "bearer",
    "bearerFormat": "JWT"
    }
    }
    }
    }
    }
    }

    It's possible to selectively ignore entities:

    {
    "db": {
    ...
    "openapi": {
    "ignore": {
    "categories": true
    }
    }
    }
    }

    It's possible to selectively ignore fields:

    {
    "db": {
    ...
    "openapi": {
    "ignore": {
    "categories": {
    "name": true
    }
    }
    }
    }
    }
  • ignore (object) — Key/value object that defines which database tables should not be mapped as API entities.

    Examples

    {
    "db": {
    ...
    "ignore": {
    "versions": true // "versions" table will be not mapped with GraphQL/REST APIs
    }
    }
    }
  • events (boolean or object, default: true) — Controls the support for events published by the SQL mapping layer. +If enabled, this option add support for GraphQL Subscription over WebSocket. By default it uses an in-process message broker. +It's possible to configure it to use Redis instead.

    Examples

    {
    "db": {
    ...
    "events": {
    "connectionString": "redis://:password@redishost.com:6380/"
    }
    }
    }
  • schemalock (boolean or object, default: false) — Controls the caching of the database schema on disk. +If set to true the database schema metadata is stored inside a schema.lock file. +It's also possible to configure the location of that file by specifying a path, like so:

    Examples

    {
    "db": {
    ...
    "schemalock": {
    "path": "./dbmetadata"
    }
    }
    }

    Starting Platformatic DB or running a migration will automatically create the schemalock file.

metrics

Configuration for a Prometheus server that will export monitoring metrics +for the current server instance. It uses fastify-metrics +under the hood.

This setting can be a boolean or an object. If set to true the Prometheus server will listen on http://0.0.0.0:9090.

Supported object properties:

  • hostname (string) — The hostname where Prometheus server will listen for connections.
  • port (number) — The port where Prometheus server will listen for connections.
  • auth (object) — Basic Auth configuration. username and password are required here +(use environment variables).

migrations

Configures Postgrator to run migrations against the database.

An optional object with the following settings:

  • dir (required, string): Relative path to the migrations directory.
  • autoApply (boolean, default: false): Automatically apply migrations when Platformatic DB server starts.

plugins

An optional object that defines the plugins loaded by Platformatic DB.

  • paths (required, array): an array of paths (string) +or an array of objects composed as follows,
    • path (string): Relative path to plugin's entry point.
    • options (object): Optional plugin options.
    • encapsulate (boolean): if the path is a folder, it instruct Platformatic to not +encapsulate those plugins, +allowing decorators and hooks to be shared across all routes.
    • maxDepth (integer): if the path is a folder, it limits the depth to load the content from.
  • typescript (boolean or object): enable TypeScript compilation. A tsconfig.json file is required in the same folder.
{
"plugins": {
"paths": [{
"path": "./my-plugin.js",
"options": {
"foo": "bar"
}
}]
}
}

typescript compilation options

The typescript option can also be an object to customize the compilation. Here are the supported options:

  • enabled (boolean): enables compilation
  • tsConfig (string): path to the tsconfig.json file relative to the configuration
  • outDir (string): the output directory of tsconfig.json, in case tsconfig.json is not available +and enabled is set to false (production build)
  • flags (array of string): flags to be passed to tsc. Overrides tsConfig. +

Example:

{
"plugins": {
"paths": [{
"path": "./my-plugin.js",
"options": {
"foo": "bar"
}
}],
"typescript": {
"enabled": false,
"tsConfig": "./path/to/tsconfig.json",
"outDir": "dist"
}
}
}

watch

Disable watching for file changes if set to false. It can also be customized with the following options:

  • ignore (string[], default: null): List of glob patterns to ignore when watching for changes. If null or not specified, ignore rule is not applied. Ignore option doesn't work for typescript files.

  • allow (string[], default: ['*.js', '**/*.js']): List of glob patterns to allow when watching for changes. If null or not specified, allow rule is not applied. Allow option doesn't work for typescript files.

    Example

    {
    "watch": {
    "ignore": ["*.mjs", "**/*.mjs"],
    "allow": ["my-plugin.js", "plugins/*.js"]
    }
    }

server

A required object with the following settings:

  • hostname (required, string) — Hostname where Platformatic DB server will listen for connections.

  • port (required, number) — Port where Platformatic DB server will listen for connections.

  • healthCheck (boolean or object) — Enables the health check endpoint.

    • Powered by @fastify/under-pressure.
    • The value can be an object, used to specify the interval between checks in milliseconds (default: 5000)

    Example

    {
    "server": {
    ...
    "healthCheck": {
    "interval": 2000
    }
    }
    }
  • cors (object) — Configuration for Cross-Origin Resource Sharing (CORS) headers.

    • All options will be passed to the @fastify/cors plugin. In order to specify a RegExp object, you can pass { regexp: 'yourregexp' }, +it will be automatically converted
  • https (object) - Configuration for HTTPS supporting the following options.

    • key (required, string, object, or array) - If key is a string, it specifies the private key to be used. If key is an object, it must have a path property specifying the private key file. Multiple keys are supported by passing an array of keys.
    • cert (required, string, object, or array) - If cert is a string, it specifies the certificate to be used. If cert is an object, it must have a path property specifying the certificate file. Multiple certificates are supported by passing an array of keys.
  • logger (object) -- the logger configuration.

  • pluginTimeout (integer) -- the number of milliseconds to wait for a Fastify plugin to load

  • bodyLimit (integer) -- the maximum request body size in bytes

  • maxParamLength (integer) -- the maximum length of a request parameter

  • caseSensitive (boolean) -- if true, the router will be case sensitive

  • ignoreTrailingSlash (boolean) -- if true, the router will ignore the trailing slash

  • connectionTimeout (integer) -- the milliseconds to wait for a new HTTP request

  • keepAliveTimeout (integer) -- the milliseconds to wait for a keep-alive HTTP request

  • maxRequestsPerSocket (integer) -- the maximum number of requests per socket

  • forceCloseConnections (boolean or "idle") -- if true, the server will close all connections when it is closed

  • requestTimeout (integer) -- the milliseconds to wait for a request to be completed

  • disableRequestLogging (boolean) -- if true, the request logger will be disabled

  • exposeHeadRoutes (boolean) -- if true, the router will expose HEAD routes

  • serializerOpts (object) -- the serializer options

  • requestIdHeader (string or false) -- the name of the header that will contain the request id

  • requestIdLogLabel (string) -- Defines the label used for the request identifier when logging the request. default: 'reqId'

  • jsonShorthand (boolean) -- default: true -- visit fastify docs for more details

  • trustProxy (boolean or integer or string or String[]) -- default: false -- visit fastify docs for more details

tip

See the fastify docs for more details.

authorization

An optional object with the following settings:

  • adminSecret (string): A secret that should be sent in an +x-platformatic-admin-secret HTTP header when performing GraphQL/REST API +calls. Use an environment variable placeholder +to securely provide the value for this setting.
  • roleKey (string, default: X-PLATFORMATIC-ROLE): The name of the key in user +metadata that is used to store the user's roles. See Role configuration.
  • anonymousRole (string, default: anonymous): The name of the anonymous role. See Role configuration.
  • jwt (object): Configuration for the JWT authorization strategy. +Any option accepted by @fastify/jwt +can be passed in this object.
  • webhook (object): Configuration for the Webhook authorization strategy.
    • url (required, string): Webhook URL that Platformatic DB will make a POST request to.
  • rules (array): Authorization rules that describe the CRUD actions that users are allowed to perform against entities. See Rules documentation.
note

If an authorization object is present, but no rules are specified, no CRUD operations are allowed unless adminSecret is passed.

Example

platformatic.db.json
{
"authorization": {
"jwt": {
"secret": "{PLT_AUTHORIZATION_JWT_SECRET}"
},
"rules": [
...
]
}
}

telemetry

Open Telemetry is optionally supported with these settings:

  • serviceName (required, string) — Name of the service as will be reported in open telemetry.
  • version (string) — Optional version (free form)
  • skip (array). Optional list of operations to skip when exporting telemetry. Each operation is defined as an object with the following properties:
    • method: GET, POST, PUT, DELETE, PATCH, HEAD, OPTIONS, TRACE
    • path. e.g.: /documentation/json
  • exporter (object or array) — Exporter configuration. If not defined, the exporter defaults to console. If an array of objects is configured, every object must be a valid exporter object. The exporter object has the following properties:
    • type (string) — Exporter type. Supported values are console, otlp, zipkin and memory (default: console). memory is only supported for testing purposes.
    • options (object) — These options are supported:
      • url (string) — The URL to send the telemetry to. Required for otlp exporter. This has no effect on console and memory exporters.
      • headers (object) — Optional headers to send with the telemetry. This has no effect on console and memory exporters.

Note that OTLP traces can be consumed by different solutions, like Jaeger. Here the full list.

Example

{
"telemetry": {
"serviceName": "test-service",
"exporter": {
"type": "otlp",
"options": {
"url": "http://localhost:4318/v1/traces"
}
}
}
}

Environment variable placeholders

The value for any configuration setting can be replaced with an environment variable by adding a placeholder in the configuration file, for example {PLT_SERVER_LOGGER_LEVEL}.

All placeholders in a configuration must be available as an environment variable and must meet the allowed placeholder name rules.

Example

platformatic.db.json
{
"db": {
"connectionString": "{DATABASE_URL}"
},
"server": {
"logger": {
"level": "{PLT_SERVER_LOGGER_LEVEL}"
},
"port": "{PORT}"
}
}

Platformatic will replace the placeholders in this example with the environment +variables of the same name.

Setting environment variables

If a .env file exists it will automatically be loaded by Platformatic using +dotenv. For example:

.env
PLT_SERVER_LOGGER_LEVEL=info
PORT=8080

The .env file must be located in the same folder as the Platformatic configuration +file or in the current working directory.

Environment variables can also be set directly on the command line, for example:

PLT_SERVER_LOGGER_LEVEL=debug npx platformatic db

Allowed placeholder names

Only placeholder names prefixed with PLT_, or that are in this allow list, will be +dynamically replaced in the configuration file:

  • PORT
  • DATABASE_URL

This restriction is to avoid accidentally exposing system environment variables. An error will be raised by Platformatic if it finds a configuration placeholder that isn't allowed.

The default allow list can be extended by passing a --allow-env CLI option with a +comma separated list of strings, for example:

npx platformatic db start --allow-env=HOST,SERVER_LOGGER_LEVEL
# OR
npx platformatic start --allow-env=HOST,SERVER_LOGGER_LEVEL

If --allow-env is passed as an option to the CLI, it will be merged with the +default allow list.

Sample Configuration

This is a bare minimum configuration for Platformatic DB. Uses a local ./db.sqlite SQLite database, with OpenAPI and GraphQL support.

Server will listen to http://127.0.0.1:3042

{
"server": {
"hostname": "127.0.0.1",
"port": "3042"
},
"db": {
"connectionString": "sqlite://./db.sqlite",
"graphiql": true,
"openapi": true,
"graphql": true
}
}
+ + + + \ No newline at end of file diff --git a/docs/0.42.0/reference/db/introduction/index.html b/docs/0.42.0/reference/db/introduction/index.html new file mode 100644 index 00000000000..f911c6cfe2e --- /dev/null +++ b/docs/0.42.0/reference/db/introduction/index.html @@ -0,0 +1,25 @@ + + + + + +Platformatic DB | Platformatic Open Source Software + + + + + +
+
Version: 0.42.0

Platformatic DB

Platformatic DB is an HTTP server that provides a flexible set of tools for +building robust APIs with Node.js.

For a high level overview of how Platformatic DB works, please reference the +Architecture guide.

info

Platformatic DB is currently in public beta.

Features

info

Get up and running in 2 minutes using our +Quick Start Guide

Supported databases

DatabaseVersion
SQLite3.
PostgreSQL>= 15
MySQL>= 5.7
MariaDB>= 10.11

The required database driver is automatically inferred and loaded based on the +value of the connectionString +configuration setting.

Public beta

Platformatic DB is in public beta. You can use it in production, but it's quite +likely that you'll encounter significant bugs.

If you run into a bug or have a suggestion for improvement, please +raise an issue on GitHub.

+ + + + \ No newline at end of file diff --git a/docs/0.42.0/reference/db/logging/index.html b/docs/0.42.0/reference/db/logging/index.html new file mode 100644 index 00000000000..702f258b159 --- /dev/null +++ b/docs/0.42.0/reference/db/logging/index.html @@ -0,0 +1,25 @@ + + + + + +Logging | Platformatic Open Source Software + + + + + +
+
Version: 0.42.0

Logging

Platformatic DB uses a low overhead logger named Pino +to output structured log messages.

Logger output level

By default the logger output level is set to info, meaning that all log messages +with a level of info or above will be output by the logger. See the +Pino documentation +for details on the supported log levels.

The logger output level can be overridden by adding a logger object to the server configuration settings group:

platformatic.db.json
{
"server": {
"logger": {
"level": "error"
},
...
},
...
}

Log formatting

If you run Platformatic DB in a terminal, where standard out (stdout) +is a TTY:

  • pino-pretty is automatically used +to pretty print the logs and make them easier to read during development.
  • The Platformatic logo is printed (if colors are supported in the terminal emulator)

Example:

$ npx platformatic db start




/////////////
///// /////
/// ///
/// ///
/// ///
&& /// /// &&
&&&&&& /// /// &&&&&&
&&&& /// /// &&&&
&&& /// /// &&&&&&&&&&&&
&&& /// /////// //// && &&&&&
&& /// /////////////// &&&
&&& /// /// &&&
&&& /// // &&
&&& /// &&
&&& /// &&&
&&&& /// &&&
&&&&& /// &&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&
///
///
///
///
///
///


[11:20:33.466] INFO (337606): server listening
url: "http://127.0.0.1:3042"

If stdout is redirected to a non-TTY, the logo is not printed and the logs are +formatted as newline-delimited JSON:

$ npx platformatic db start | head
{"level":30,"time":1665566628973,"pid":338365,"hostname":"darkav2","url":"http://127.0.0.1:3042","msg":"server listening"}
+ + + + \ No newline at end of file diff --git a/docs/0.42.0/reference/db/migrations/index.html b/docs/0.42.0/reference/db/migrations/index.html new file mode 100644 index 00000000000..5e26a5cd30f --- /dev/null +++ b/docs/0.42.0/reference/db/migrations/index.html @@ -0,0 +1,17 @@ + + + + + +Migrations | Platformatic Open Source Software + + + + + +
+
Version: 0.42.0

Migrations

It uses Postgrator under the hood to run migrations. Please refer to the Postgrator documentation for guidance on writing migration files.

In brief, you should create a file structure like this

migrations/
|- 001.do.sql
|- 001.undo.sql
|- 002.do.sql
|- 002.undo.sql
|- 003.do.sql
|- 003.undo.sql
|- 004.do.sql
|- 004.undo.sql
|- ... and so on

Postgrator uses a table in your schema, to store which migrations have been already processed, so that only new ones will be applied at every server start.

You can always rollback some migrations by specifying which version you would like to rollback to.

Example

$ platformatic db migrations apply --to 002

Will execute 004.undo.sql, 003.undo.sql in this order. If you keep those files in migrations directory, when the server restarts it will execute 003.do.sql and 004.do.sql in this order if the autoApply value is true, or you can run the db migrations apply command.

It's also possible to rollback a single migration with -r:

$ platformatic db migrations apply -r 

How to run migrations

There are two ways to run migrations in Platformatic DB. They can be processed automatically when the server starts if the autoApply value is true, or you can just run the db migrations apply command.

In both cases you have to edit your config file to tell Platformatic DB where are your migration files.

Automatically on server start

To run migrations when Platformatic DB starts, you need to use the config file root property migrations.

There are two options in the "migrations" property

  • dir (required) the directory where the migration files are located. It will be relative to the config file path.
  • autoApply a boolean value that tells Platformatic DB to auto-apply migrations or not (default: false)

Example

{
...
"migrations": {
"dir": "./path/to/migrations/folder",
"autoApply": false
}
}

Manually with the CLI

See documentation about db migrations apply command

In short:

  • be sure to define a correct migrations.dir folder under the config on platformatic.db.json
  • get the MIGRATION_NUMBER (e.g. if the file is named 002.do.sql it will be 002)
  • run npx platformatic db migrations apply --to MIGRATION_NUMBER
+ + + + \ No newline at end of file diff --git a/docs/0.42.0/reference/db/plugin/index.html b/docs/0.42.0/reference/db/plugin/index.html new file mode 100644 index 00000000000..9b045e5a90f --- /dev/null +++ b/docs/0.42.0/reference/db/plugin/index.html @@ -0,0 +1,19 @@ + + + + + +Plugin | Platformatic Open Source Software + + + + + +
+
Version: 0.42.0

Plugin

If you want to extend Platformatic DB features, it is possible to register a plugin, which will be in the form of a standard Fastify plugin.

The config file will specify where the plugin file is located as the example below:

{
...
"plugins": {
"paths": ["./plugin/index.js"]
}
}

The paths are relative to the config file path.

Once the config file is set up, you can write your plugin to extend Platformatic DB API or write your custom business logic.

You should export an async function which receives the following parameters:

  • app (FastifyInstance) that is the main fastify instance running Platformatic DB
  • opts all the options specified in the config file after path
  • You can always access Platformatic data mapper through app.platformatic property.
info

To make sure that a user has the appropriate set of permissions to perform any action on an entity the context should be passed to the entity mapper operation like this:

app.post('/', async (req, reply) => {
const ctx = req.createPlatformaticCtx()

await app.platformatic.entities.movies.find({
where: { /*...*/ },
ctx
})
})

Check some examples.

Hot Reload

Plugin file is being watched by fs.watch function.

You don't need to reload Platformatic DB server while working on your plugin. Every time you save, the watcher will trigger a reload event and the server will auto-restart and load your updated code.

tip

At this time, on Linux, file watch in subdirectories is not supported due to a Node.js limitation (documented here).

Directories

The path can also be a directory. In that case, the directory will be loaded with @fastify/autoload.

Consider the following directory structure:

├── routes
│ ├── foo
│ │ ├── something.js
│ │ └── bar
│ │ └── baz.js
│ ├── single-plugin
│ │ └── utils.js
│ └── another-plugin.js
└── platformatic.service.json

By default the folder will be added as a prefix to all the routes defined within them. +See the autoload documentation for all the options to customize this behavior.

Multiple plugins

Multiple plugins can be loaded in parallel by specifying an array:

{
...
"plugins": {
"paths": [{
"path": "./plugin/index.js"
}, {
"path": "./routes/"
}]
}
}

fastify.swagger()

TypeScript and autocompletion

If you want to access any of the types provided by Platformatic DB, generate them using the platformatic db types command. +This will create a global.d.ts file that you can now import everywhere, like so:

/// <reference types="./global.d.ts" />

Remember to adjust the path to global.d.ts.

Plugin definition with TypeScript

Here is an example of writing a plugin in TypeScript:

/// <reference types="./global.d.ts" />
import { FastifyInstance, FastifyPluginOptions } from 'fastify'

export default async function (fastify: FastifyInstance, opts: FastifyPluginOptions) {
}

Note that you need to add the "plugins": { "typescript": true } configuration to your platformatic.service.json.

+ + + + \ No newline at end of file diff --git a/docs/0.42.0/reference/db/programmatic/index.html b/docs/0.42.0/reference/db/programmatic/index.html new file mode 100644 index 00000000000..a8b4524fcc1 --- /dev/null +++ b/docs/0.42.0/reference/db/programmatic/index.html @@ -0,0 +1,17 @@ + + + + + +Programmatic API | Platformatic Open Source Software + + + + + +
+
Version: 0.42.0

Programmatic API

It's possible to start an instance of Platformatic DB from JavaScript.

import { buildServer } from '@platformatic/db'

const app = await buildServer('/path/to/platformatic.db.json')

await app.start() // this will start our server

console.log('URL', app.url)

const res = await fetch(app.url)
console.log(await res.json())

// do something

await app.close()

It is also possible to customize the configuration:

import { buildServer } from '@platformatic/db'

const app = await buildServer({
server: {
hostname: '127.0.0.1',
port: 0
},
db: {
connectionString: 'sqlite://test.sqlite'
},
})

await app.start() // this will start our server

console.log('URL', app.url)

const res = await fetch(app.url)
console.log(await res.json())

// do something

await app.close()

For more details on how this is implemented, read Platformatic Service Programmatic API.

API

buildServer(config)

Returns an instance of the restartable application

RestartableApp

.start()

Listen to the hostname/port combination specified in the config.

.restart()

Restart the Fastify application

.close()

Stops the application.

+ + + + \ No newline at end of file diff --git a/docs/0.42.0/reference/db/schema-support/index.html b/docs/0.42.0/reference/db/schema-support/index.html new file mode 100644 index 00000000000..201c72e7860 --- /dev/null +++ b/docs/0.42.0/reference/db/schema-support/index.html @@ -0,0 +1,21 @@ + + + + + +Schema support | Platformatic Open Source Software + + + + + +
+
Version: 0.42.0

Schema support

It's possible to specify the schemas where the tables are located (if the database supports schemas). Platformatic DB will inspect these schemas to create the entities.

Example

CREATE SCHEMA IF NOT EXISTS "test1";
CREATE TABLE IF NOT EXISTS test1.movies (
id INTEGER PRIMARY KEY,
title TEXT NOT NULL
);

CREATE SCHEMA IF NOT EXISTS "test2";
CREATE TABLE IF NOT EXISTS test2.users (
id INTEGER PRIMARY KEY,
title TEXT NOT NULL
);

The schemas must be specified in configuration in the schema section. +Note that if we use schemas and migrations, we must specify the schema in the migrations table as well +(with postgresql, we assume we use the default public schema).

  ...
"db": {
"connectionString": "(...)",
"schema": [
"test1", "test2"
],
"ignore": {
"versions": true
}
},
"migrations": {
"dir": "migrations",
"table": "test1.versions"
},

...

The entity names are then generated in the form schemaName + entityName, in PascalCase (this is necessary to avoid name collisions in case there are tables with the same name in different schemas). So for instance, for the example above we generate the Test1Movie and Test2User entities.

info

Please pay attention to the entity names when using schema, these are also used to setup authorization rules

+ + + + \ No newline at end of file diff --git a/docs/0.42.0/reference/runtime/configuration/index.html b/docs/0.42.0/reference/runtime/configuration/index.html new file mode 100644 index 00000000000..456ba39f8ba --- /dev/null +++ b/docs/0.42.0/reference/runtime/configuration/index.html @@ -0,0 +1,67 @@ + + + + + +Configuration | Platformatic Open Source Software + + + + + +
+
Version: 0.42.0

Configuration

Platformatic Runtime is configured with a configuration file. It supports the +use of environment variables as setting values with configuration placeholders.

Configuration file

If the Platformatic CLI finds a file in the current working directory matching +one of these filenames, it will automatically load it:

  • platformatic.runtime.json
  • platformatic.runtime.json5
  • platformatic.runtime.yml or platformatic.runtime.yaml
  • platformatic.runtime.tml or platformatic.runtime.toml

Alternatively, a --config option with a configuration +filepath can be passed to most platformatic runtime CLI commands.

The configuration examples in this reference use JSON.

Supported formats

FormatExtensions
JSON.json
JSON5.json5
YAML.yml, .yaml
TOML.tml, .toml

Comments are supported by the JSON5, YAML and TOML file formats.

Settings

Configuration settings are organized into the following groups:

Configuration settings containing sensitive data should be set using +configuration placeholders.

The autoload and services settings can be used together, but at least one +of them must be provided. When the configuration file is parsed, autoload +configuration is translated into services configuration.

autoload

The autoload configuration is intended to be used with monorepo applications. +autoload is an object with the following settings:

  • path (required, string) - The path to a directory containing the +microservices to load. In a traditional monorepo application, this directory is +typically named packages.
  • exclude (array of strings) - Child directories inside of path that +should not be processed.
  • mappings (object) - Each microservice is given an ID and is expected +to have a Platformatic configuration file. By default the ID is the +microservice's directory name, and the configuration file is expected to be a +well-known Platformatic configuration file. mappings can be used to override +these default values.
    • id (required, string) - The overridden ID. This becomes the new +microservice ID.
    • config (required, string) - The overridden configuration file name. This is the file that will be used when starting the microservice.

services

services is an array of objects that defines the microservices managed by the +runtime. Each service object supports the following settings:

  • id (required, string) - A unique identifier for the microservice. +When working with the Platformatic Composer, this value corresponds to the id +property of each object in the services section of the config file. When +working with client objects, this corresponds to the optional serviceId +property or the name field in the client's package.json file if a +serviceId is not explicitly provided.
  • path (required, string) - The path to the directory containing +the microservice.
  • config (required, string) - The configuration file used to start +the microservice.

entrypoint

The Platformatic Runtime's entrypoint is a microservice that is exposed +publicly. This value must be the ID of a service defined via the autoload or +services configuration.

hotReload

An optional boolean, defaulting to false, indicating if hot reloading should +be enabled for the runtime. If this value is set to false, it will disable +hot reloading for any microservices managed by the runtime. If this value is +true, hot reloading for individual microservices is managed by the +configuration of that microservice.

danger

While hot reloading is useful for development, it is not recommended for use in +production.

allowCycles

An optional boolean, defaulting to false, indicating if dependency cycles +are allowed between microservices managed by the runtime. When the Platformatic +Runtime parses the provided configuration, it examines the clients of each +microservice, as well as the services of Platformatic Composer applications to +build a dependency graph. A topological sort is performed on this dependency +graph so that each service is started after all of its dependencies have been +started. If there are cycles, the topological sort fails and the Runtime does +not start any applications.

If allowCycles is true, the topological sort is skipped, and the +microservices are started in the order specified in the configuration file.

telemetry

Open Telemetry is optionally supported with these settings:

  • serviceName (required, string) — Name of the service as will be reported in open telemetry. In the runtime case, the name of the services as reported in traces is ${serviceName}-${serviceId}, where serviceId is the id of the service in the runtime.
  • version (string) — Optional version (free form)
  • skip (array). Optional list of operations to skip when exporting telemetry. Each operation is defined as an object with the following properties:
    • method: GET, POST, PUT, DELETE, PATCH, HEAD, OPTIONS, TRACE
    • path. e.g.: /documentation/json
  • exporter (object or array) — Exporter configuration. If not defined, the exporter defaults to console. If an array of objects is configured, every object must be a valid exporter object. The exporter object has the following properties:
    • type (string) — Exporter type. Supported values are console, otlp, zipkin and memory (default: console). memory is only supported for testing purposes.
    • options (object) — These options are supported:
      • url (string) — The URL to send the telemetry to. Required for otlp exporter. This has no effect on console and memory exporters.
      • headers (object) — Optional headers to send with the telemetry. This has no effect on console and memory exporters.

Note that OTLP traces can be consumed by different solutions, like Jaeger. Here the full list.

Example

{
"telemetry": {
"serviceName": "test-service",
"exporter": {
"type": "otlp",
"options": {
"url": "http://localhost:4318/v1/traces"
}
}
}
}

Environment variable placeholders

The value for any configuration setting can be replaced with an environment +variable by adding a placeholder in the configuration file, for example +{PLT_ENTRYPOINT}.

All placeholders in a configuration must be available as an environment +variable and must meet the +allowed placeholder name rules.

Setting environment variables

If a .env file exists it will automatically be loaded by Platformatic using +dotenv. For example:

.env
PLT_ENTRYPOINT=service

The .env file must be located in the same folder as the Platformatic +configuration file or in the current working directory.

Environment variables can also be set directly on the command line, for example:

PLT_ENTRYPOINT=service npx platformatic runtime

Allowed placeholder names

Only placeholder names prefixed with PLT_, or that are in this allow list, +will be dynamically replaced in the configuration file:

  • PORT
  • DATABASE_URL

This restriction is to avoid accidentally exposing system environment variables. +An error will be raised by Platformatic if it finds a configuration placeholder +that isn't allowed.

The default allow list can be extended by passing a --allow-env CLI option +with a comma separated list of strings, for example:

npx platformatic runtime --allow-env=HOST,SERVER_LOGGER_LEVEL

If --allow-env is passed as an option to the CLI, it will be merged with the +default allow list.

+ + + + \ No newline at end of file diff --git a/docs/0.42.0/reference/runtime/introduction/index.html b/docs/0.42.0/reference/runtime/introduction/index.html new file mode 100644 index 00000000000..2088d15c2b2 --- /dev/null +++ b/docs/0.42.0/reference/runtime/introduction/index.html @@ -0,0 +1,33 @@ + + + + + +Platformatic Runtime | Platformatic Open Source Software + + + + + +
+
Version: 0.42.0

Platformatic Runtime

Platformatic Runtime is an environment for running multiple Platformatic +microservices as a single monolithic deployment unit.

info

Platformatic Runtime is currently in public beta.

Features

Public beta

Platformatic Runtime is in public beta. You can use it in production, but it's quite +likely that you'll encounter significant bugs.

If you run into a bug or have a suggestion for improvement, please +raise an issue on GitHub.

Standalone usage

If you're only interested in the features available in Platformatic Runtime, you can replace platformatic with @platformatic/runtime in the dependencies of your package.json, so that you'll import fewer deps.

Example configuration file

The following configuration file can be used to start a new Platformatic +Runtime project. For more details on the configuration file, see the +configuration documentation.

{
"$schema": "https://platformatic.dev/schemas/v0.26.0/runtime",
"autoload": {
"path": "./packages",
"exclude": ["docs"]
},
"entrypoint": "entrypointApp"
}

TypeScript Compilation

Platformatic Runtime streamlines the compilation of all services built on TypeScript with the command +plt runtime compile. The TypeScript compiler (tsc) is required to be installed separately.

Interservice communication

The Platformatic Runtime allows multiple microservice applications to run +within a single process. Only the entrypoint binds to an operating system +port and can be reached from outside of the runtime.

Within the runtime, all interservice communication happens by injecting HTTP +requests into the running servers, without binding them to ports. This injection +is handled by +fastify-undici-dispatcher.

Each microservice is assigned an internal domain name based on its unique ID. +For example, a microservice with the ID awesome is given the internal domain +of http://awesome.plt.local. The fastify-undici-dispatcher module maps that +domain to the Fastify server running the awesome microservice. Any Node.js +APIs based on Undici, such as fetch(), will then automatically route requests +addressed to awesome.plt.local to the corresponding Fastify server.

+ + + + \ No newline at end of file diff --git a/docs/0.42.0/reference/runtime/programmatic/index.html b/docs/0.42.0/reference/runtime/programmatic/index.html new file mode 100644 index 00000000000..d2d588c16e7 --- /dev/null +++ b/docs/0.42.0/reference/runtime/programmatic/index.html @@ -0,0 +1,28 @@ + + + + + +Programmatic API | Platformatic Open Source Software + + + + + +
+
Version: 0.42.0

Programmatic API

In many cases it's useful to start Platformatic applications using an API +instead of the command line. The @platformatic/runtime API makes it simple to +work with different application types (e.g. service, db, composer and runtime) without +needing to know the application type a priori.

buildServer()

The buildServer function creates a server from a provided configuration +object or configuration filename. +The config can be of either Platformatic Service, Platformatic DB, +Platformatic Composer or any other application built on top of +Platformatic Service.

import { buildServer } from '@platformatic/runtime'

const app = await buildServer('path/to/platformatic.runtime.json')
const entrypointUrl = await app.start()

// Make a request to the entrypoint.
const res = await fetch(entrypointUrl)
console.log(await res.json())

// Do other interesting things.

await app.close()

It is also possible to customize the configuration:

import { buildServer } from '@platformatic/runtime'

const config = {
// $schema: 'https://platformatic.dev/schemas/v0.39.0/runtime',
// $schema: 'https://platformatic.dev/schemas/v0.39.0/service',
// $schema: 'https://platformatic.dev/schemas/v0.39.0/db',
// $schema: 'https://platformatic.dev/schemas/v0.39.0/composer'
...
}
const app = await buildServer(config)

await app.start()

loadConfig()

The loadConfig function is used to read and parse a configuration file for +an arbitrary Platformatic application.

import { loadConfig } from '@platformatic/runtime'

// Read the config based on command line arguments. loadConfig() will detect
// the application type.
const config = await loadConfig({}, ['-c', '/path/to/platformatic.config.json'])

// Read the config based on command line arguments. The application type can
// be provided explicitly.
const config = await loadConfig(
{},
['-c', '/path/to/platformatic.config.json']
)

// Default config can be specified.
const config = await loadConfig(
{},
['-c', '/path/to/platformatic.config.json'],
{ key: 'value' }
)

start()

The start function loads a configuration, builds a server, and starts the +server. However, the server is not returned.

import { start } from '@platformatic/runtime'

await start(['-c', '/path/to/platformatic.config.json'])

startCommand()

The startCommand function is similar to start. However, if an exception +occurs, startCommand logs the error and exits the process. This is different +from start, which throws the exception.

import { startCommand } from '@platformatic/runtime'

await startCommand(['-c', '/path/to/platformatic.config.json'])
+ + + + \ No newline at end of file diff --git a/docs/0.42.0/reference/service/configuration/index.html b/docs/0.42.0/reference/service/configuration/index.html new file mode 100644 index 00000000000..5fe2004d42c --- /dev/null +++ b/docs/0.42.0/reference/service/configuration/index.html @@ -0,0 +1,38 @@ + + + + + +Configuration | Platformatic Open Source Software + + + + + +
+
Version: 0.42.0

Configuration

Platformatic Service is configured with a configuration file. It supports the use +of environment variables as setting values with configuration placeholders.

Configuration file

If the Platformatic CLI finds a file in the current working directory matching +one of these filenames, it will automatically load it:

  • platformatic.service.json
  • platformatic.service.json5
  • platformatic.service.yml or platformatic.service.yaml
  • platformatic.service.tml or platformatic.service.toml

Alternatively, a --config option with a configuration +filepath can be passed to most platformatic service CLI commands.

The configuration examples in this reference use JSON.

Supported formats

FormatExtensions
JSON.json
JSON5.json5
YAML.yml, .yaml
TOML.tml, .toml

Comments are supported by the JSON5, YAML and TOML file formats.

Settings

Configuration settings are organised into the following groups:

Sensitive configuration settings, such as a database connection URL that contains +a password, should be set using configuration placeholders.

server

A required object with the following settings:

  • hostname (required, string) — Hostname where Platformatic Service server will listen for connections.

  • port (required, number) — Port where Platformatic Service server will listen for connections.

  • healthCheck (boolean or object) — Enables the health check endpoint.

    • Powered by @fastify/under-pressure.
    • The value can be an object, used to specify the interval between checks in milliseconds (default: 5000)

    Example

    {
    "server": {
    ...
    "healthCheck": {
    "interval": 2000
    }
    }
    }
  • cors (object) — Configuration for Cross-Origin Resource Sharing (CORS) headers.

    • All options will be passed to the @fastify/cors plugin. In order to specify a RegExp object, you can pass { regexp: 'yourregexp' }, +it will be automatically converted.
  • logger (object) -- the logger configuration.

  • pluginTimeout (integer) -- the number of milliseconds to wait for a Fastify plugin to load, see the fastify docs for more details.

  • https (object) - Configuration for HTTPS supporting the following options.

    • key (required, string, object, or array) - If key is a string, it specifies the private key to be used. If key is an object, it must have a path property specifying the private key file. Multiple keys are supported by passing an array of keys.
    • cert (required, string, object, or array) - If cert is a string, it specifies the certificate to be used. If cert is an object, it must have a path property specifying the certificate file. Multiple certificates are supported by passing an array of keys.

metrics

Configuration for a Prometheus server that will export monitoring metrics +for the current server instance. It uses fastify-metrics +under the hood.

This setting can be a boolean or an object. If set to true the Prometheus server will listen on http://0.0.0.0:9090.

Supported object properties:

  • hostname (string) — The hostname where Prometheus server will listen for connections.
  • port (number) — The port where Prometheus server will listen for connections.
  • auth (object) — Basic Auth configuration. username and password are required here +(use environment variables).

plugins

An optional object that defines the plugins loaded by Platformatic Service.

  • paths (required, array): an array of paths (string) +or an array of objects composed as follows,
    • path (string): Relative path to plugin's entry point.
    • options (object): Optional plugin options.
    • encapsulate (boolean): if the path is a folder, it instructs Platformatic to not encapsulate those plugins.
    • maxDepth (integer): if the path is a folder, it limits the depth to load the content from.
  • typescript (boolean or object): enable TypeScript compilation. A tsconfig.json file is required in the same folder.

Example

{
"plugins": {
"paths": [{
"path": "./my-plugin.js",
"options": {
"foo": "bar"
}
}]
}
}

typescript compilation options

The typescript can also be an object to customize the compilation. Here are the supported options:

  • enabled (boolean): enables compilation
  • tsConfig (string): path to the tsconfig.json file relative to the configuration
  • outDir (string): the output directory of tsconfig.json, in case tsconfig.json is not available +and enabled is set to false (production build)
  • flags (array of string): flags to be passed to tsc. Overrides tsConfig. +

Example:

{
"plugins": {
"paths": [{
"path": "./my-plugin.js",
"options": {
"foo": "bar"
}
}],
"typescript": {
"enabled": false,
"tsConfig": "./path/to/tsconfig.json",
"outDir": "dist"
}
}
}

watch

Disable watching for file changes if set to false. It can also be customized with the following options:

  • ignore (string[], default: null): List of glob patterns to ignore when watching for changes. If null or not specified, ignore rule is not applied. Ignore option doesn't work for typescript files.

  • allow (string[], default: ['*.js', '**/*.js']): List of glob patterns to allow when watching for changes. If null or not specified, allow rule is not applied. Allow option doesn't work for typescript files.

    Example

    {
    "watch": {
    "ignore": ["*.mjs", "**/*.mjs"],
    "allow": ["my-plugin.js", "plugins/*.js"]
    }
    }

service

Configure @platformatic/service specific settings such as graphql or openapi:

  • graphql (boolean or object, default: false) — Controls the GraphQL API interface, with optional GraphiQL UI.

    Examples

    Enables GraphQL support

    {
    "service": {
    "graphql": true
    }
    }

    Enables GraphQL support with GraphiQL

    {
    "service": {
    "graphql": {
    "graphiql": true
    }
    }
    }
  • openapi (boolean or object, default: false) — Enables OpenAPI REST support.

    • If value is an object, all OpenAPI v3 allowed properties can be passed. Also a prefix property can be passed to set the OpenAPI prefix.
    • Platformatic Service uses @fastify/swagger under the hood to manage this configuration.

    Examples

    Enables OpenAPI

    {
    "service": {
    ...
    "openapi": true
    }
    }

    Enables OpenAPI with prefix

    {
    "service": {
    "openapi": {
    "prefix": "/api"
    }
    }
    }

    Enables OpenAPI with options

    {
    "service": {
    "openapi": {
    "info": {
    "title": "Platformatic Service",
    "description": "Exposing a SQL database as REST"
    }
    }
    }
    }

telemetry

Open Telemetry is optionally supported with these settings:

  • serviceName (required, string) — Name of the service as will be reported in open telemetry.
  • version (string) — Optional version (free form)
  • skip (array). Optional list of operations to skip when exporting telemetry defined object with properties:
    • method: GET, POST, PUT, DELETE, PATCH, HEAD, OPTIONS, TRACE
    • path. e.g.: /documentation/json
  • exporter (object or array) — Exporter configuration. If not defined, the exporter defaults to console. If an array of objects is configured, every object must be a valid exporter object. The exporter object has the following properties:
    • type (string) — Exporter type. Supported values are console, otlp, zipkin and memory (default: console). memory is only supported for testing purposes.
    • options (object) — These options are supported:
      • url (string) — The URL to send the telemetry to. Required for otlp exporter. This has no effect on console and memory exporters.
      • headers (object) — Optional headers to send with the telemetry. This has no effect on console and memory exporters.

Note that OTLP traces can be consumed by different solutions, like Jaeger. Here the full list.

Example

{
"telemetry": {
"serviceName": "test-service",
"exporter": {
"type": "otlp",
"options": {
"url": "http://localhost:4318/v1/traces"
}
}
}
}

Environment variable placeholders

The value for any configuration setting can be replaced with an environment variable +by adding a placeholder in the configuration file, for example {PLT_SERVER_LOGGER_LEVEL}.

All placeholders in a configuration must be available as an environment variable +and must meet the allowed placeholder name rules.

Example

platformatic.service.json
{
"server": {
"port": "{PORT}"
}
}

Platformatic will replace the placeholders in this example with the environment +variables of the same name.

Setting environment variables

If a .env file exists it will automatically be loaded by Platformatic using +dotenv. For example:

.env
PLT_SERVER_LOGGER_LEVEL=info
PORT=8080

The .env file must be located in the same folder as the Platformatic configuration +file or in the current working directory.

Environment variables can also be set directly on the command line, for example:

PLT_SERVER_LOGGER_LEVEL=debug npx platformatic service

Allowed placeholder names

Only placeholder names prefixed with PLT_, or that are in this allow list, will be +dynamically replaced in the configuration file:

  • PORT
  • DATABASE_URL

This restriction is to avoid accidentally exposing system environment variables. +An error will be raised by Platformatic if it finds a configuration placeholder +that isn't allowed.

The default allow list can be extended by passing a --allow-env CLI option with a +comma separated list of strings, for example:

npx platformatic service --allow-env=HOST,SERVER_LOGGER_LEVEL

If --allow-env is passed as an option to the CLI, it will be merged with the +default allow list.

+ + + + \ No newline at end of file diff --git a/docs/0.42.0/reference/service/introduction/index.html b/docs/0.42.0/reference/service/introduction/index.html new file mode 100644 index 00000000000..7d9ec65135b --- /dev/null +++ b/docs/0.42.0/reference/service/introduction/index.html @@ -0,0 +1,21 @@ + + + + + +Platformatic Service | Platformatic Open Source Software + + + + + +
+
Version: 0.42.0

Platformatic Service

Platformatic Service is an HTTP server that provides developer tools for +building robust APIs with Node.js.

For a high level overview of how Platformatic Service works, please reference the +Architecture guide.

info

Platformatic Service is currently in public beta.

Features

Public beta

Platformatic Service is in public beta. You can use it in production, but it's quite +likely that you'll encounter significant bugs.

If you run into a bug or have a suggestion for improvement, please +raise an issue on GitHub.

Standalone usage

If you're only interested in the features available in Platformatic Service, you can simply switch platformatic with @platformatic/service in the dependencies of your package.json, so that you'll only import fewer deps.

You can use the plt-service command, it's the equivalent of plt service.

+ + + + \ No newline at end of file diff --git a/docs/0.42.0/reference/service/plugin/index.html b/docs/0.42.0/reference/service/plugin/index.html new file mode 100644 index 00000000000..b3880c1d35c --- /dev/null +++ b/docs/0.42.0/reference/service/plugin/index.html @@ -0,0 +1,21 @@ + + + + + +Plugin | Platformatic Open Source Software + + + + + +
+
Version: 0.42.0

Plugin

If you want to add features to a service, you will need to register a plugin, which will be in the form of a standard Fastify plugin.

The config file will specify where the plugin file is located as the example below:

{
...
"plugins": {
"paths": ["./plugin/index.js"]
}
}

The path is relative to the config file path.

You should export an async function which receives a parameters

  • app (FastifyInstance) that is the main fastify instance
  • opts all the options specified in the config file after path

Hot Reload

Plugin file is being watched by fs.watch function.

You don't need to reload Platformatic Service server while working on your plugin. Every time you save, the watcher will trigger a reload event and the server will auto-restart and load your updated code.

tip

At this time, on Linux, file watch in subdirectories is not supported due to a Node.js limitation (documented here).

Directories

The path can also be a directory. In that case, the directory will be loaded with @fastify/autoload.

Consider the following directory structure:

├── routes
│ ├── foo
│ │ ├── something.js
│ │ └── bar
│ │ └── baz.js
│ ├── single-plugin
│ │ └── utils.js
│ └── another-plugin.js
└── platformatic.service.json

By default the folder will be added as a prefix to all the routes defined within them. +See the autoload documentation for all the options to customize this behavior.

Multiple plugins

Multiple plugins can be loaded in parallel by specifying an array:

{
...
"plugins": {
"paths": [{
"path": "./plugin/index.js"
}, {
"path": "./routes/"
}]
}
}

TypeScript and Autocompletion

In order to provide the correct typings of the features added by Platformatic Service to your Fastify instance, +add the following at the top of your files:

/// <reference types="@platformatic/service" />

Plugin definition with TypeScript

Here is an example of writing a plugin in TypeScript:

/// <reference types="@platformatic/service" />
import { FastifyInstance, FastifyPluginOptions } from 'fastify'

export default async function (fastify: FastifyInstance, opts: FastifyPluginOptions) {
}

Note that you need to add the "typescript": true configuration to your platformatic.service.json.

Loading compiled files

Setting "typescript": false but including a tsconfig.json with an outDir +option, will instruct Platformatic Service to try loading your plugins from that folder instead. +This setup is needed to support pre-compiled sources to reduce cold start time during deployment.

+ + + + \ No newline at end of file diff --git a/docs/0.42.0/reference/service/programmatic/index.html b/docs/0.42.0/reference/service/programmatic/index.html new file mode 100644 index 00000000000..afaaedadc1f --- /dev/null +++ b/docs/0.42.0/reference/service/programmatic/index.html @@ -0,0 +1,19 @@ + + + + + +Programmatic API | Platformatic Open Source Software + + + + + +
+
Version: 0.42.0

Programmatic API

In many cases it's useful to start Platformatic Service using an API instead of +command line, e.g. in tests we want to start and stop our server.

The buildServer function allows that:

import { buildServer } from '@platformatic/service'

const app = await buildServer('path/to/platformatic.service.json')

await app.start()

const res = await fetch(app.url)
console.log(await res.json())

// do something

await app.close()

It is also possible to customize the configuration:

import { buildServer } from '@platformatic/service'

const app = await buildServer({
server: {
hostname: '127.0.0.1',
port: 0
}
})

await app.start()

const res = await fetch(app.url)
console.log(await res.json())

// do something

await app.close()

Creating a reusable application on top of Platformatic Service

Platformatic DB is built on top of Platformatic Service. +If you want to build a similar kind of tool, follow this example:

import { buildServer, schema, platformaticService } from '@platformatic/service'

async function myPlugin (app, opts) {
// app.platformatic.configManager contains an instance of the ConfigManager
console.log(app.platformatic.configManager.current)

await platformaticService(app, opts)
}

// break Fastify encapsulation
myPlugin[Symbol.for('skip-override')] = true
myPlugin.configType = 'myPlugin'

// This is the schema for this reusable application configuration file,
// customize at will but retain the base properties of the schema from
// @platformatic/service
myPlugin.schema = schema

// The configuration of the ConfigManager
myPlugin.configManagerConfig = {
schema: myPlugin.schema,
envWhitelist: ['PORT', 'HOSTNAME'],
allowToWatch: ['.env'],
schemaOptions: {
useDefaults: true,
coerceTypes: true,
allErrors: true,
strict: false
},
async transformConfig () {
console.log(this.current) // this is the current config

// In this method you can alter the configuration before the application
// is started. It's useful to apply some defaults that cannot be derived
// inside the schema, such as resolving paths.
}
}


const server = await buildServer('path/to/config.json', myPlugin)

await server.start()

const res = await fetch(server.listeningOrigin)
console.log(await res.json())

// do something

await server.close()
+ + + + \ No newline at end of file diff --git a/docs/0.42.0/reference/sql-events/fastify-plugin/index.html b/docs/0.42.0/reference/sql-events/fastify-plugin/index.html new file mode 100644 index 00000000000..f605d2bede8 --- /dev/null +++ b/docs/0.42.0/reference/sql-events/fastify-plugin/index.html @@ -0,0 +1,19 @@ + + + + + +Fastify Plugin | Platformatic Open Source Software + + + + + +
+
Version: 0.42.0

Fastify Plugin

The @platformatic/sql-events package exports a Fastify plugin that can be used out of the box in a server application. +It requires that @platformatic/sql-mapper is registered before it.

The plugin has the following options:

The plugin adds the following properties to the app.platformatic object:

  • mq — an instance of mqemitter
  • subscribe(topics) — a method to create a node Readable +that will contain the events emitted by those topics.

Each entity of app.platformatic.entities will be augmented with two functions:

  • entity.getPublishTopic({ ctx, data, action })
  • entity.getSubscriptionTopic({ ctx, action })

Where ctx is the GraphQL Context, data is the object that will be emitted and action is either save or delete.

Usage

'use strict'

const Fastify = require('fastify')
const mapper = require('@platformatic/sql-mapper')
const events = require('@platformatic/sql-events')

async function main() {
const app = Fastify({
logger: {
level: 'info'
}
})
app.register(mapper.plugin, {
connectionString: 'postgres://postgres:postgres@127.0.0.1/postgres'
})

app.register(events)

// setup your routes


await app.listen({ port: 3333 })
}

main()
+ + + + \ No newline at end of file diff --git a/docs/0.42.0/reference/sql-events/introduction/index.html b/docs/0.42.0/reference/sql-events/introduction/index.html new file mode 100644 index 00000000000..d24ab479d5a --- /dev/null +++ b/docs/0.42.0/reference/sql-events/introduction/index.html @@ -0,0 +1,21 @@ + + + + + +Introduction to the sql-events module | Platformatic Open Source Software + + + + + +
+
Version: 0.42.0

Introduction to the sql-events module

The Platformatic DB sql-events uses mqemitter to publish events when entities are saved and deleted.

These events are useful to distribute updates to clients, e.g. via WebSocket, Server-Sent Events, or GraphQL Subscriptions. +When subscribing and using a multi-process system with a broker like Redis, a subscribed topic will receive the data from all +the other processes.

They are not the right choice for executing some code whenever an entity is created, modified or deleted, in that case +use @platformatic/sql-mapper hooks.

Install

You can use it together with @platformatic/sql-mapper.

npm i @platformatic/sql-mapper @platformatic/sql-events

Usage

const { connect } = require('@platformatic/sql-mapper')
const { setupEmitter } = require('@platformatic/sql-events')
const { pino } = require('pino')

const log = pino()

async function onDatabaseLoad (db, sql) {
await db.query(sql`CREATE TABLE pages (
id SERIAL PRIMARY KEY,
title VARCHAR(255) NOT NULL
);`)
}
const connectionString = 'postgres://postgres:postgres@localhost:5432/postgres'
const mapper = await connect({
connectionString,
log,
onDatabaseLoad,
ignore: {},
hooks: {
Page: {
find: async function(_find, opts) {
console.log('hook called');
return await _find(opts)
}
}
}
})

setupEmitter({ mapper, log })

const pageEntity = mapper.entities.page

const queue = await mapper.subscribe([
pageEntity.getSubscriptionTopic({ action: 'save' }),
pageEntity.getSubscriptionTopic({ action: 'delete' })
])

const page = await pageEntity.save({
input: { title: 'fourth page' }
})

const page2 = await pageEntity.save({
input: {
id: page.id,
title: 'fifth page'
}
})

await pageEntity.delete({
where: {
id: {
eq: page.id
}
},
fields: ['id', 'title']
})

for await (const ev of queue) {
console.log(ev)
if (expected.length === 0) {
break
}
}

process.exit(0)

API

The setupEmitter function has the following options:

The setupEmitter functions adds the following properties to the mapper object:

  • mq — an instance of mqemitter
  • subscribe(topics) — a method to create a node Readable +that will contain the events emitted by those topics.

Each entity of app.platformatic.entities will be augmented with two functions:

  • entity.getPublishTopic({ ctx, data, action })
  • entity.getSubscriptionTopic({ ctx, action })

Where ctx is the GraphQL Context, data is the object that will be emitted and action is either save or delete.

+ + + + \ No newline at end of file diff --git a/docs/0.42.0/reference/sql-graphql/ignore/index.html b/docs/0.42.0/reference/sql-graphql/ignore/index.html new file mode 100644 index 00000000000..c9429590203 --- /dev/null +++ b/docs/0.42.0/reference/sql-graphql/ignore/index.html @@ -0,0 +1,17 @@ + + + + + +Ignoring types and fields | Platformatic Open Source Software + + + + + + + + + + \ No newline at end of file diff --git a/docs/0.42.0/reference/sql-graphql/introduction/index.html b/docs/0.42.0/reference/sql-graphql/introduction/index.html new file mode 100644 index 00000000000..9862f5e25f2 --- /dev/null +++ b/docs/0.42.0/reference/sql-graphql/introduction/index.html @@ -0,0 +1,21 @@ + + + + + +Introduction to the GraphQL API | Platformatic Open Source Software + + + + + +
+
Version: 0.42.0

Introduction to the GraphQL API

The Platformatic DB GraphQL plugin starts a GraphQL server and makes it available +via a /graphql endpoint. This endpoint is automatically ready to run queries and +mutations against your entities. This functionality is powered by +Mercurius.

GraphiQL

The GraphiQL web UI is integrated into +Platformatic DB. To enable it you can pass an option to the sql-graphql plugin:

app.register(graphqlPlugin, { graphiql: true })

The GraphiQL interface is made available under the /graphiql path.

+ + + + \ No newline at end of file diff --git a/docs/0.42.0/reference/sql-graphql/many-to-many/index.html b/docs/0.42.0/reference/sql-graphql/many-to-many/index.html new file mode 100644 index 00000000000..fadf5aea7b1 --- /dev/null +++ b/docs/0.42.0/reference/sql-graphql/many-to-many/index.html @@ -0,0 +1,20 @@ + + + + + +Many To Many Relationship | Platformatic Open Source Software + + + + + +
+
Version: 0.42.0

Many To Many Relationship

Many-to-Many relationship lets you relate each row in one table to many rows in +another table and vice versa.

Many-to-many relationships are implemented in SQL via a "join table", a table whose primary key +is composed by the identifier of the two parts of the many-to-many relationship.

Platformatic DB fully supports many-to-many relationships on all supported databases.

Example

Consider the following schema (SQLite):

CREATE TABLE pages (
id INTEGER PRIMARY KEY,
the_title VARCHAR(42)
);

CREATE TABLE users (
id INTEGER PRIMARY KEY,
username VARCHAR(255) NOT NULL
);

CREATE TABLE editors (
page_id INTEGER NOT NULL,
user_id INTEGER NOT NULL,
role VARCHAR(255) NOT NULL,
CONSTRAINT fk_editor_pages FOREIGN KEY (page_id) REFERENCES pages(id),
CONSTRAINT fk_editor_users FOREIGN KEY (user_id) REFERENCES users(id),
PRIMARY KEY (page_id, user_id)
);

The table editors is a "join table" between users and pages. +Given this schema, you could issue queries like:

query {
editors(orderBy: { field: role, direction: DESC }) {
user {
id
username
}
page {
id
theTitle
}
role
}
}

Mutation works exactly the same as before:

mutation {
saveEditor(input: { userId: "1", pageId: "1", role: "captain" }) {
user {
id
username
}
page {
id
theTitle
}
role
}
}
+ + + + \ No newline at end of file diff --git a/docs/0.42.0/reference/sql-graphql/mutations/index.html b/docs/0.42.0/reference/sql-graphql/mutations/index.html new file mode 100644 index 00000000000..ca682a9ad6b --- /dev/null +++ b/docs/0.42.0/reference/sql-graphql/mutations/index.html @@ -0,0 +1,20 @@ + + + + + +Mutations | Platformatic Open Source Software + + + + + +
+
Version: 0.42.0

Mutations

When the GraphQL plugin is loaded, some mutations are automatically added to +the GraphQL schema.

save[ENTITY]

Saves a new entity to the database or updates an existing entity. +This actually behaves as an upsert, allowing both behaviours depending on the presence of the primary key field.

Example

'use strict'

const Fastify = require('fastify')
const graphqlPlugin = require('@platformatic/sql-graphql')
const sqlMapper = require('@platformatic/sql-mapper')

async function main() {
const app = Fastify({
logger: {
level: 'info'
}
})
app.register(sqlMapper, {
connectionString: 'postgres://postgres:postgres@127.0.0.1/postgres',
log: logger,
})
app.register(graphqlPlugin, {
graphiql: true
})
const res = await app.inject({
method: 'POST',
url: '/graphql',
body: {
query: `
mutation {
savePage(input: { id: 3 title: "Platformatic is cool!" }) {
id
title
}
}
`
}
})
const result = await res.json()
console.log(result.data) // { savePage: { id: '3', title: 'Platformatic is cool!' } }
await app.close()
}

main()

insert[ENTITY]

Inserts a new entity in the database.

Example

'use strict'

const Fastify = require('fastify')
const graphqlPlugin = require('@platformatic/sql-graphql')
const sqlMapper = require('@platformatic/sql-mapper')

async function main() {
const app = Fastify({
logger: {
level: 'info'
}
})
app.register(sqlMapper, {
connectionString: 'postgres://postgres:postgres@127.0.0.1/postgres',
log: logger,
})
app.register(graphqlPlugin, {
graphiql: true
})
const res = await app.inject({
method: 'POST',
url: '/graphql',
body: {
query: `
mutation {
savePage(input: { title: "Platformatic is cool!" }) {
id
title
}
}
`
}
})
const result = await res.json()
console.log(result.data) // { savePage: { id: '4', title: 'Platformatic is cool!' } }
await app.close()
}

main()

delete[ENTITIES]

Deletes one or more entities from the database, based on the where clause +passed as an input to the mutation.

Example

'use strict'

const Fastify = require('fastify')
const graphqlPlugin = require('@platformatic/sql-graphql')
const sqlMapper = require('@platformatic/sql-mapper')

async function main() {
const app = Fastify({
logger: {
level: 'info'
}
})
app.register(sqlMapper, {
connectionString: 'postgres://postgres:postgres@127.0.0.1/postgres',
log: logger,
})
app.register(graphqlPlugin, {
graphiql: true
})
const res = await app.inject({
method: 'POST',
url: '/graphql',
body: {
query: `
mutation {
deletePages(where: { id: { eq: "3" } }) {
id
title
}
}
`
}
})
const result = await res.json()
console.log(result.data) // { deletePages: [ { id: '3', title: 'Platformatic is cool!' } ] }
await app.close()
}

main()
+ + + + \ No newline at end of file diff --git a/docs/0.42.0/reference/sql-graphql/queries/index.html b/docs/0.42.0/reference/sql-graphql/queries/index.html new file mode 100644 index 00000000000..0c810fbbd73 --- /dev/null +++ b/docs/0.42.0/reference/sql-graphql/queries/index.html @@ -0,0 +1,21 @@ + + + + + +Queries | Platformatic Open Source Software + + + + + +
+
Version: 0.42.0

Queries

A GraphQL query is automatically added to the GraphQL schema for each database +table, along with a complete mapping for all table fields.

Example

'use strict'

const Fastify = require('fastify')
const graphqlPlugin = require('@platformatic/sql-graphql')
const sqlMapper = require('@platformatic/sql-mapper')
async function main() {
const app = Fastify({
logger: {
level: 'info'
}
})
app.register(sqlMapper, {
connectionString: 'postgres://postgres:postgres@127.0.0.1/postgres'
})
app.register(graphqlPlugin, {
graphiql: true
})
const res = await app.inject({
method: 'POST',
url: '/graphql',
body: {
query: `
query{
pages{
id,
title
}
}
`
}
})
const result = await res.json()
console.log(result.data)
await app.close()
}
main()

Advanced Queries

The following additional queries are added to the GraphQL schema for each entity:

get[ENTITY]by[PRIMARY_KEY]

If you have a table pages with the field id as the primary key, you can run +a query called getPageById.

Example

...
const res = await app.inject({
method: 'POST',
url: '/graphql',
body: {
query: `
query{
getPageById(id: 3) {
id,
title
}
}
`
}
})
const result = await res.json()
console.log(result.data) // { getPageById: { id: '3', title: 'A fiction' } }

count[ENTITIES]

...
const res = await app.inject({
method: 'POST',
url: '/graphql',
body: {
query: `
query {
countPages {
total
}
}
`
}
})
const result = await res.json()
console.log(result.data) // { countPages: { total: 17 } }

Pagination

Platformatic DB supports pagination of results through the input parameters: limit and offset

Example

{
users(limit:5, offset: 10) {
name
}
}

It returns 5 users starting from position 10.

Limit

By default a limit value (10) is applied to each request.

Clients can override this behavior by passing a value. +In this case the server validates the input and an error is returned if it exceeds the max accepted value (100).

Limit's values can be customized through configuration:

{
...
"db": {
...
"limit": {
"default": 50,
"max": 1000
}
}
}

Limit only accepts values >= 0. Otherwise an error is returned.

Offset

By default offset is not applied to the request. +Clients can override this behavior by passing a value.

Offset only accepts values >= 0. Otherwise an error is returned.

+ + + + \ No newline at end of file diff --git a/docs/0.42.0/reference/sql-graphql/subscriptions/index.html b/docs/0.42.0/reference/sql-graphql/subscriptions/index.html new file mode 100644 index 00000000000..ef80e798fd8 --- /dev/null +++ b/docs/0.42.0/reference/sql-graphql/subscriptions/index.html @@ -0,0 +1,19 @@ + + + + + +Subscription | Platformatic Open Source Software + + + + + +
+
Version: 0.42.0

Subscription

When the GraphQL plugin is loaded, some subscriptions are automatically added to +the GraphQL schema if the @platformatic/sql-events plugin has been previously registered.

It's possible to avoid creating the subscriptions for a given entity by adding the subscriptionIgnore config, +like so: subscriptionIgnore: ['page'].

[ENTITY]Saved

Published whenever an entity is saved, e.g. when the mutation insert[ENTITY] or save[ENTITY] are called.

[ENTITY]Deleted

Published whenever an entity is deleted, e.g. when the mutation delete[ENTITY] is called.

+ + + + \ No newline at end of file diff --git a/docs/0.42.0/reference/sql-mapper/entities/api/index.html b/docs/0.42.0/reference/sql-mapper/entities/api/index.html new file mode 100644 index 00000000000..4d8e34974bb --- /dev/null +++ b/docs/0.42.0/reference/sql-mapper/entities/api/index.html @@ -0,0 +1,18 @@ + + + + + +API | Platformatic Open Source Software + + + + + +
+
Version: 0.42.0

API

A set of operation methods are available on each entity:

Returned fields

The entity operation methods accept a fields option that can specify an array of field names to be returned. If not specified, all fields will be returned.

Where clause

The entity operation methods accept a where option to allow limiting of the database rows that will be affected by the operation.

The where object's key is the field you want to check, the value is a key/value map where the key is an operator (see the table below) and the value is the value you want to run the operator against.

Platformatic operatorSQL operator
eq'='
in'IN'
nin'NOT IN'
neq'<>'
gt'>'
gte'>='
lt'<'
lte'<='
like'LIKE'

Examples

Selects row with id = 1

{
...
"where": {
id: {
eq: 1
}
}
}

Select all rows with id less than 100

{
...
"where": {
id: {
lt: 100
}
}
}

Select all rows with id 1, 3, 5 or 7

{
...
"where": {
id: {
in: [1, 3, 5, 7]
}
}
}

Where clause operations are by default combined with the AND operator. To combine them with the OR operator, use the or key.

Select all rows with id 1 or 3

{
...
"where": {
or: [
{
id: {
eq: 1
}
},
{
id: {
eq: 3
}
}
]
}
}

Select all rows with id 1 or 3 and title like 'foo%'

{
...
"where": {
or: [
{
id: {
eq: 1
}
},
{
id: {
eq: 3
}
}
],
title: {
like: 'foo%'
}
}
}

Reference

find

Retrieve data for an entity from the database.

Options

NameTypeDescription
fieldsArray of stringList of fields to be returned for each object
whereObjectWhere clause 🔗
orderByArray of ObjectObject like { field: 'counter', direction: 'ASC' }
limitNumberLimits the number of returned elements
offsetNumberThe offset to start looking for rows from

Usage

'use strict'

const { connect } = require('@platformatic/sql-mapper')
const { pino } = require('pino')
const pretty = require('pino-pretty')
const logger = pino(pretty())

async function main() {
const pgConnectionString = 'postgres://postgres:postgres@127.0.0.1/postgres'
const mapper = await connect({
connectionString: pgConnectionString,
log: logger,
})
const res = await mapper.entities.page.find({
fields: ['id', 'title',],
where: {
id: {
lt: 10
}
},
})
logger.info(res)
await mapper.db.dispose()
}
main()

count

Same as find, but only count entities.

Options

NameTypeDescription
whereObjectWhere clause 🔗

Usage

'use strict'

const { connect } = require('@platformatic/sql-mapper')
const { pino } = require('pino')
const pretty = require('pino-pretty')
const logger = pino(pretty())

async function main() {
const pgConnectionString = 'postgres://postgres:postgres@127.0.0.1/postgres'
const mapper = await connect({
connectionString: pgConnectionString,
log: logger,
})
const res = await mapper.entities.page.count({
where: {
id: {
lt: 10
}
},
})
logger.info(res)
await mapper.db.dispose()
}
main()

insert

Insert one or more entity rows in the database.

Options

NameTypeDescription
fieldsArray of stringList of fields to be returned for each object
inputsArray of ObjectEach object is a new row

Usage

'use strict'

const { connect } = require('@platformatic/sql-mapper')
const { pino } = require('pino')
const pretty = require('pino-pretty')
const logger = pino(pretty())

async function main() {
const pgConnectionString = 'postgres://postgres:postgres@127.0.0.1/postgres'
const mapper = await connect({
connectionString: pgConnectionString,
log: logger,
})
const res = await mapper.entities.page.insert({
fields: ['id', 'title' ],
inputs: [
{ title: 'Foobar' },
{ title: 'FizzBuzz' }
],
})
logger.info(res)
/**
0: {
"id": "16",
"title": "Foobar"
}
1: {
"id": "17",
"title": "FizzBuzz"
}
*/
await mapper.db.dispose()
}
main()

save

Create a new entity row in the database or update an existing one.

To update an existing entity, the id field (or equivalent primary key) must be included in the input object. +save actually behaves as an upsert, allowing both behaviours depending on the presence of the primary key field.

Options

NameTypeDescription
fieldsArray of stringList of fields to be returned for each object
inputObjectThe single row to create/update

Usage

'use strict'
const { connect } = require('@platformatic/sql-mapper')
const { pino } = require('pino')
const pretty = require('pino-pretty')
const logger = pino(pretty())

async function main() {
const connectionString = 'postgres://postgres:postgres@127.0.0.1/postgres'
const mapper = await connect({
connectionString: connectionString,
log: logger,
})
const res = await mapper.entities.page.save({
fields: ['id', 'title' ],
input: { id: 1, title: 'FizzBuzz' },
})
logger.info(res)
await mapper.db.dispose()
}
main()

delete

Delete one or more entity rows from the database, depending on the where option. Returns the data for all deleted objects.

Options

NameTypeDescription
fieldsArray of stringList of fields to be returned for each object
whereObjectWhere clause 🔗

Usage

'use strict'
const { connect } = require('@platformatic/sql-mapper')
const { pino } = require('pino')
const pretty = require('pino-pretty')
const logger = pino(pretty())

async function main() {
const connectionString = 'postgres://postgres:postgres@127.0.0.1/postgres'
const mapper = await connect({
connectionString: connectionString,
log: logger,
})
const res = await mapper.entities.page.delete({
fields: ['id', 'title',],
where: {
id: {
lt: 4
}
},
})
logger.info(res)
await mapper.db.dispose()
}
main()

updateMany

Update one or more entity rows from the database, depending on the where option. Returns the data for all updated objects.

Options

NameTypeDescription
whereObjectWhere clause 🔗
inputObjectThe new values that you want to update
fieldsArray of stringList of fields to be returned for each object

Usage

'use strict'
const { connect } = require('@platformatic/sql-mapper')
const { pino } = require('pino')
const pretty = require('pino-pretty')
const logger = pino(pretty())

async function main() {
const connectionString = 'postgres://postgres:postgres@127.0.0.1/postgres'
const mapper = await connect({
connectionString: connectionString,
log: logger,
})
const res = await mapper.entities.page.updateMany({
fields: ['id', 'title',],
where: {
counter: {
gte: 30
}
},
input: {
title: 'Updated title'
}
})
logger.info(res)
await mapper.db.dispose()
}
main()

+ + + + \ No newline at end of file diff --git a/docs/0.42.0/reference/sql-mapper/entities/example/index.html b/docs/0.42.0/reference/sql-mapper/entities/example/index.html new file mode 100644 index 00000000000..dd5ee8f7a0f --- /dev/null +++ b/docs/0.42.0/reference/sql-mapper/entities/example/index.html @@ -0,0 +1,17 @@ + + + + + +Example | Platformatic Open Source Software + + + + + +
+
Version: 0.42.0

Example

Given this PostgreSQL SQL schema:

CREATE TABLE "categories" (
"id" int4 NOT NULL DEFAULT nextval('categories_id_seq'::regclass),
"name" varchar(255) NOT NULL,
PRIMARY KEY ("id")
);

CREATE TABLE "pages" (
"id" int4 NOT NULL DEFAULT nextval('pages_id_seq'::regclass),
"title" varchar(255) NOT NULL,
"category_id" int4,
"user_id" int4,
PRIMARY KEY ("id")
);

ALTER TABLE "pages" ADD FOREIGN KEY ("category_id") REFERENCES "categories"("id");

app.platformatic.entities will contain this mapping object:

{
"category": {
"name": "Category",
"singularName": "category",
"pluralName": "categories",
"primaryKey": "id",
"table": "categories",
"fields": {
"id": {
"sqlType": "int4",
"isNullable": false,
"primaryKey": true,
"camelcase": "id"
},
"name": {
"sqlType": "varchar",
"isNullable": false,
"camelcase": "name"
}
},
"camelCasedFields": {
"id": {
"sqlType": "int4",
"isNullable": false,
"primaryKey": true,
"camelcase": "id"
},
"name": {
"sqlType": "varchar",
"isNullable": false,
"camelcase": "name"
}
},
"relations": [],
"reverseRelationships": [
{
"sourceEntity": "Page",
"relation": {
"constraint_catalog": "postgres",
"constraint_schema": "public",
"constraint_name": "pages_category_id_fkey",
"table_catalog": "postgres",
"table_schema": "public",
"table_name": "pages",
"constraint_type": "FOREIGN KEY",
"is_deferrable": "NO",
"initially_deferred": "NO",
"enforced": "YES",
"column_name": "category_id",
"ordinal_position": 1,
"position_in_unique_constraint": 1,
"foreign_table_name": "categories",
"foreign_column_name": "id"
}
}
]
},
"page": {
"name": "Page",
"singularName": "page",
"pluralName": "pages",
"primaryKey": "id",
"table": "pages",
"fields": {
"id": {
"sqlType": "int4",
"isNullable": false,
"primaryKey": true,
"camelcase": "id"
},
"title": {
"sqlType": "varchar",
"isNullable": false,
"camelcase": "title"
},
"category_id": {
"sqlType": "int4",
"isNullable": true,
"foreignKey": true,
"camelcase": "categoryId"
},
"user_id": {
"sqlType": "int4",
"isNullable": true,
"camelcase": "userId"
}
},
"camelCasedFields": {
"id": {
"sqlType": "int4",
"isNullable": false,
"primaryKey": true,
"camelcase": "id"
},
"title": {
"sqlType": "varchar",
"isNullable": false,
"camelcase": "title"
},
"categoryId": {
"sqlType": "int4",
"isNullable": true,
"foreignKey": true,
"camelcase": "categoryId"
},
"userId": {
"sqlType": "int4",
"isNullable": true,
"camelcase": "userId"
}
},
"relations": [
{
"constraint_catalog": "postgres",
"constraint_schema": "public",
"constraint_name": "pages_category_id_fkey",
"table_catalog": "postgres",
"table_schema": "public",
"table_name": "pages",
"constraint_type": "FOREIGN KEY",
"is_deferrable": "NO",
"initially_deferred": "NO",
"enforced": "YES",
"column_name": "category_id",
"ordinal_position": 1,
"position_in_unique_constraint": 1,
"foreign_table_name": "categories",
"foreign_column_name": "id"
}
],
"reverseRelationships": []
}
}
+ + + + \ No newline at end of file diff --git a/docs/0.42.0/reference/sql-mapper/entities/fields/index.html b/docs/0.42.0/reference/sql-mapper/entities/fields/index.html new file mode 100644 index 00000000000..bba7a7d20d0 --- /dev/null +++ b/docs/0.42.0/reference/sql-mapper/entities/fields/index.html @@ -0,0 +1,17 @@ + + + + + +Fields | Platformatic Open Source Software + + + + + +
+
Version: 0.42.0

Fields

When Platformatic DB inspects a database's schema, it creates an object for each table that contains a mapping of their fields.

These objects contain the following properties:

  • singularName: singular entity name, based on table name. Uses inflected under the hood.
  • pluralName: plural entity name (i.e 'pages')
  • primaryKey: the field which is identified as primary key.
  • table: original table name
  • fields: an object containing all fields details. Object key is the field name.
  • camelCasedFields: an object containing all fields details in camelcase. If you have a column named user_id you can access it using both userId or user_id

Fields detail

  • sqlType: The original field type. It may vary depending on the underlying DB Engine
  • isNullable: Whether the field can be null or not
  • primaryKey: Whether the field is the primary key or not
  • camelcase: The camelcased value of the field

Example

Given this SQL Schema (for PostgreSQL):

CREATE SEQUENCE IF NOT EXISTS pages_id_seq;
CREATE TABLE "public"."pages" (
"id" int4 NOT NULL DEFAULT nextval('pages_id_seq'::regclass),
"title" varchar,
"body_content" text,
"category_id" int4,
PRIMARY KEY ("id")
);

The resulting mapping object will be:

{
singularName: 'page',
pluralName: 'pages',
primaryKey: 'id',
table: 'pages',
fields: {
id: {
sqlType: 'int4',
isNullable: false,
primaryKey: true,
camelcase: 'id'
},
title: {
sqlType: 'varchar',
isNullable: true,
camelcase: 'title'
},
body_content: {
sqlType: 'text',
isNullable: true,
camelcase: 'bodyContent'
},
category_id: {
sqlType: 'int4',
isNullable: true,
foreignKey: true,
camelcase: 'categoryId'
}
}
camelCasedFields: {
id: {
sqlType: 'int4',
isNullable: false,
primaryKey: true,
camelcase: 'id'
},
title: {
sqlType: 'varchar',
isNullable: true,
camelcase: 'title'
},
bodyContent: {
sqlType: 'text',
isNullable: true,
camelcase: 'bodyContent'
},
categoryId: {
sqlType: 'int4',
isNullable: true,
foreignKey: true,
camelcase: 'categoryId'
}
},
relations: []
}
+ + + + \ No newline at end of file diff --git a/docs/0.42.0/reference/sql-mapper/entities/hooks/index.html b/docs/0.42.0/reference/sql-mapper/entities/hooks/index.html new file mode 100644 index 00000000000..2b6fd749303 --- /dev/null +++ b/docs/0.42.0/reference/sql-mapper/entities/hooks/index.html @@ -0,0 +1,17 @@ + + + + + +Hooks | Platformatic Open Source Software + + + + + +
+
Version: 0.42.0

Hooks

Entity hooks are a way to wrap the API methods for an entity and add custom behaviour.

The Platformatic DB SQL Mapper provides an addEntityHooks(entityName, spec) function that can be used to add hooks for an entity.

How to use hooks

addEntityHooks accepts two arguments:

  1. A string representing the entity name (singularized), for example 'page'.
  2. A key/value object where the key is one of the API methods (find, insert, save, delete) and the value is a callback function. The callback will be called with the original API method and the options that were passed to that method. See the example below.

Usage

'use strict'
const { connect } = require('@platformatic/sql-mapper')
const { pino } = require('pino')
const pretty = require('pino-pretty')
const logger = pino(pretty())

async function main() {
const pgConnectionString = 'postgres://postgres:postgres@127.0.0.1/postgres'
const mapper = await connect({
connectionString: pgConnectionString,
log: logger,
})
mapper.addEntityHooks('page', {
find: async (originalFind, opts) => {
// Add a `foo` field with `bar` value to each row
const res = await originalFind(opts)
return res.map((row) => {
row.foo = 'bar'
return row
})
}
})
const res = await mapper.entities.page.find({
fields: ['id', 'title',],
where: {
id: {
lt: 10
}
},
})
logger.info(res)
/**
[
0: {
"id": "5",
"title": "Page 1",
"foo": "bar"
},
1: {
"id": "6",
"title": "Page 2",
"foo": "bar"
}
]
*/
await mapper.db.dispose()
}
main()

Multiple Hooks

Multiple hooks can be added for the same entity and API method, for example:

'use strict'
const { connect } = require('@platformatic/sql-mapper')
const { pino } = require('pino')
const pretty = require('pino-pretty')
const logger = pino(pretty())

async function main() {
const pgConnectionString = 'postgres://postgres:postgres@127.0.0.1/postgres'
const mapper = await connect({
connectionString: pgConnectionString,
log: logger,
})
mapper.addEntityHooks('page', {
find: async function firstHook(previousFunction, opts) {
// Add a `foo` field with `bar` value to each row
const res = await previousFunction(opts)
return res.map((row) => {
row.foo = 'bar'
return row
})
}
})
mapper.addEntityHooks('page', {
find: async function secondHook(previousFunction, opts) {
// Add a `bar` field with `baz` value to each row
const res = await previousFunction(opts)
return res.map((row) => {
row.bar = 'baz'
return row
})
}
})
const res = await mapper.entities.page.find({
fields: ['id', 'title',],
where: {
id: {
lt: 10
}
},
})
logger.info(res)
/**
[
0: {
"id": "5",
"title": "Page 1",
"foo": "bar",
"bar": "baz"
},
1: {
"id": "6",
"title": "Page 2",
"foo": "bar",
"bar": "baz"
}
]
*/
await mapper.db.dispose()
}
main()

Since hooks are wrappers, they are being called in reverse order, like the image below

Hooks Lifecycle

So even though we defined two hooks, the Database will be hit only once.

Query result will be processed by firstHook, which will pass the result to secondHook, which will, finally, send the processed result to the original .find({...}) function.

+ + + + \ No newline at end of file diff --git a/docs/0.42.0/reference/sql-mapper/entities/introduction/index.html b/docs/0.42.0/reference/sql-mapper/entities/introduction/index.html new file mode 100644 index 00000000000..a0b2c2af326 --- /dev/null +++ b/docs/0.42.0/reference/sql-mapper/entities/introduction/index.html @@ -0,0 +1,17 @@ + + + + + +Introduction to Entities | Platformatic Open Source Software + + + + + +
+
Version: 0.42.0

Introduction to Entities

The primary goal of Platformatic DB is to read a database schema and generate REST and GraphQL endpoints that enable the execution of CRUD (Create/Retrieve/Update/Delete) operations against the database.

Platformatic DB includes a mapper that reads the schemas of database tables and then generates an entity object for each table.

Platformatic DB is a Fastify application. The Fastify instance object is decorated with the platformatic property, which exposes several APIs that handle the manipulation of data in the database.

Platformatic DB populates the app.platformatic.entities object with data found in database tables.

The keys on the entities object are singularized versions of the table names — for example users becomes user, categories becomes category — and the values are a set of associated metadata and functions.

+ + + + \ No newline at end of file diff --git a/docs/0.42.0/reference/sql-mapper/entities/relations/index.html b/docs/0.42.0/reference/sql-mapper/entities/relations/index.html new file mode 100644 index 00000000000..9e41a6e5ae6 --- /dev/null +++ b/docs/0.42.0/reference/sql-mapper/entities/relations/index.html @@ -0,0 +1,20 @@ + + + + + +Relations | Platformatic Open Source Software + + + + + +
+
Version: 0.42.0

Relations

When Platformatic DB is reading your database schema, it identifies relationships +between tables and stores metadata on them in the entity object's relations field. +This is achieved by querying the database's internal metadata.

Example

Given this PostgreSQL schema:

CREATE SEQUENCE IF NOT EXISTS categories_id_seq;

CREATE TABLE "categories" (
"id" int4 NOT NULL DEFAULT nextval('categories_id_seq'::regclass),
"name" varchar(255) NOT NULL,
PRIMARY KEY ("id")
);

CREATE SEQUENCE IF NOT EXISTS pages_id_seq;

CREATE TABLE "pages" (
"id" int4 NOT NULL DEFAULT nextval('pages_id_seq'::regclass),
"title" varchar(255) NOT NULL,
"body_content" text,
"category_id" int4,
PRIMARY KEY ("id")
);

ALTER TABLE "pages" ADD FOREIGN KEY ("category_id") REFERENCES "categories"("id");

When this code is run:

'use strict'
const { connect } = require('@platformatic/sql-mapper')
const { pino } = require('pino')
const pretty = require('pino-pretty')
const logger = pino(pretty())

async function main() {
const pgConnectionString = 'postgres://postgres:postgres@127.0.0.1/postgres'
const mapper = await connect({
connectionString: pgConnectionString,
log: logger,
})
const pageEntity = mapper.entities.page
console.log(pageEntity.relations)
await mapper.db.dispose()
}
main()

The output will be:

[
{
constraint_catalog: 'postgres',
constraint_schema: 'public',
constraint_name: 'pages_category_id_fkey',
table_catalog: 'postgres',
table_schema: 'public',
table_name: 'pages',
constraint_type: 'FOREIGN KEY',
is_deferrable: 'NO',
initially_deferred: 'NO',
enforced: 'YES',
column_name: 'category_id',
ordinal_position: 1,
position_in_unique_constraint: 1,
foreign_table_name: 'categories',
foreign_column_name: 'id'
}
]

As Platformatic DB supports multiple database engines, the contents of the +relations object will vary depending on the database being used.

The following relations fields are common to all database engines:

  • column_name — the column that stores the foreign key
  • foreign_table_name — the table hosting the related row
  • foreign_column_name — the column in foreign table that identifies the row
+ + + + \ No newline at end of file diff --git a/docs/0.42.0/reference/sql-mapper/entities/timestamps/index.html b/docs/0.42.0/reference/sql-mapper/entities/timestamps/index.html new file mode 100644 index 00000000000..b90baa8d7a0 --- /dev/null +++ b/docs/0.42.0/reference/sql-mapper/entities/timestamps/index.html @@ -0,0 +1,17 @@ + + + + + +Timestamps | Platformatic Open Source Software + + + + + +
+
Version: 0.42.0

Timestamps

Timestamps can be used to automatically set the created_at and updated_at fields on your entities.

Timestamps are enabled by default

Configuration

To disable timestamps, you need to set the autoTimestamp field to false in configuration file:

{
...
"db": {
"connectionString": "postgres://postgres:postgres@127.0.0.1/postgres",
"autoTimestamp": false
},
...
}

Customizing the field names

By default, the created_at and updated_at fields are used. You can customize the field names by setting the createdAt and updatedAt options in autoTimestamp field in configuration file:

{
...
"db": {
"connectionString": "postgres://postgres:postgres@127.0.0.1/postgres",
"autoTimestamp": {
"createdAt": "inserted_at",
"updatedAt": "updated_at"
}
...
}
+ + + + \ No newline at end of file diff --git a/docs/0.42.0/reference/sql-mapper/entities/transactions/index.html b/docs/0.42.0/reference/sql-mapper/entities/transactions/index.html new file mode 100644 index 00000000000..c98b3f736d7 --- /dev/null +++ b/docs/0.42.0/reference/sql-mapper/entities/transactions/index.html @@ -0,0 +1,18 @@ + + + + + +Transactions | Platformatic Open Source Software + + + + + +
+
Version: 0.42.0

Transactions

Platformatic DB entities support transactions through the optional tx parameter. +If the tx parameter is provided, the entity will join the transaction, e.g.:


const { connect } = require('@platformatic/sql-mapper')
const logger = pino(pretty())

async function main() {
const pgConnectionString = 'postgres://postgres:postgres@127.0.0.1/postgres'
const { db, entities} = await connect({
connectionString: pgConnectionString,
log: logger,
})

const result = await db.tx(async tx => {
// these two operations will be executed in the same transaction
const authorResult = await entities.author.save({
fields: ['id', 'name'],
input: { name: 'test'},
tx
})
const res = await entities.page.save({
fields: ['title', 'authorId'],
input: { title: 'page title', authorId: authorResult.id },
tx
})
return res
})

}

Throwing an Error triggers a transaction rollback:

    try {
await db.tx(async tx => {
await entities.page.save({
input: { title: 'new page' },
fields: ['title'],
tx
})

// here we have `new page`
const findResult = await entities.page.find({ fields: ['title'], tx })

// (...)

// We force the rollback
throw new Error('rollback')
})
} catch (e) {
// rollback
}

// no 'new page' here...
const afterRollback = await entities.page.find({ fields: ['title'] })

+ + + + \ No newline at end of file diff --git a/docs/0.42.0/reference/sql-mapper/fastify-plugin/index.html b/docs/0.42.0/reference/sql-mapper/fastify-plugin/index.html new file mode 100644 index 00000000000..439491b8526 --- /dev/null +++ b/docs/0.42.0/reference/sql-mapper/fastify-plugin/index.html @@ -0,0 +1,17 @@ + + + + + +sql-mapper Fastify Plugin | Platformatic Open Source Software + + + + + +
+
Version: 0.42.0

sql-mapper Fastify Plugin

The @platformatic/sql-mapper package exports a Fastify plugin that can be used out-of the box in a server application.

A connectionString option must be passed to connect to your database.

The plugin decorates the server with a platformatic object that has the following properties:

  • db — the DB wrapper object provided by @databases
  • sql — the SQL query mapper object provided by @databases
  • entities — all entity objects with their API methods
  • addEntityHooks — a function to add a hook to an entity API method.

The plugin also decorates the Fastify Request object with the following:

  • platformaticContext: an object with the following two properties:
    • app, the Fastify application of the given route
    • reply, the Fastify Reply instance matching that request

Usage

'use strict'

const Fastify = require('fastify')
const mapper = require('@platformatic/sql-mapper')

async function main() {
const app = Fastify({
logger: {
level: 'info'
}
})
app.register(mapper.plugin, {
connectionString: 'postgres://postgres:postgres@127.0.0.1/postgres'
})

app.get('/all-pages', async (req, reply) => {
// Optionally get the platformatic context.
// Passing this to all sql-mapper functions allow to apply
// authorization rules to the database queries (amongst other things).
const ctx = req.platformaticContext

// Will return all rows from 'pages' table
const res = await app.platformatic.entities.page.find({ ctx })
return res
})

await app.listen({ port: 3333 })
}

main()
+ + + + \ No newline at end of file diff --git a/docs/0.42.0/reference/sql-mapper/introduction/index.html b/docs/0.42.0/reference/sql-mapper/introduction/index.html new file mode 100644 index 00000000000..40fba535fc7 --- /dev/null +++ b/docs/0.42.0/reference/sql-mapper/introduction/index.html @@ -0,0 +1,19 @@ + + + + + +Introduction to @platformatic/sql-mapper | Platformatic Open Source Software + + + + + +
+
Version: 0.42.0

Introduction to @platformatic/sql-mapper

@platformatic/sql-mapper is the underlying utility that Platformatic DB uses to create useful utilities to +manipulate your SQL database using JavaScript.

This module is bundled with Platformatic DB via a Fastify plugin. +The rest of this guide shows how to use this module directly.

Install

npm i @platformatic/sql-mapper

API

connect(opts) : Promise

It will inspect a database schema and return an object containing:

  • db — A database abstraction layer from @databases
  • sql — The SQL builder from @databases
  • entities — An object containing a key for each table found in the schema, with basic CRUD operations. See Entity Reference for details.

The valid options are:

  • connectionString — The Database connection string
  • poolSize - Maximum number of connections in the connection pool. Defaults to 10.
  • log — A logger object (like Pino)
  • onDatabaseLoad — An async function that is called after the connection is established. It will receive db and sql as parameter.
  • ignore — Object used to ignore some tables from building entities. (i.e. { 'versions': true } will ignore versions table)
  • autoTimestamp — Generate timestamp automatically when inserting/updating records.
  • hooks — For each entity name (like Page) you can customize any of the entity API function. Your custom function will receive the original function as first parameter, and then all the other parameters passed to it.

createConnectionPool(opts) : Promise

It will inspect a database schema and return an object containing:

The valid options are:

  • connectionString — The Database connection string
  • poolSize - Maximum number of connections in the connection pool. Defaults to 10.
  • log — A logger object (like Pino)

This utility is useful if you just need to connect to the db without generating any entity.

Code samples

const { connect } = require('@platformatic/sql-mapper')
const { pino } = require('pino')

const logger = pino()

async function onDatabaseLoad (db, sql) {
await db.query(sql`CREATE TABLE pages (
id SERIAL PRIMARY KEY,
title VARCHAR(255) NOT NULL
);`)
}
const connectionString =
'postgres://postgres:postgres@localhost:5432/postgres'
const mapper = await connect({
connectionString,
log: logger,
onDatabaseLoad,
ignore: {},
hooks: {
Page: {
find: async function(_find, opts) {
console.log('hook called');
return await _find(opts)
}
}
}
})
const pageEntity = mapper.entities.page

await mapper.db.query(mapper.sql`SELECT * FROM pages`)
await mapper.db.find('option1', 'option2')
+ + + + \ No newline at end of file diff --git a/docs/0.42.0/reference/sql-openapi/api/index.html b/docs/0.42.0/reference/sql-openapi/api/index.html new file mode 100644 index 00000000000..e4882a8a074 --- /dev/null +++ b/docs/0.42.0/reference/sql-openapi/api/index.html @@ -0,0 +1,22 @@ + + + + + +API | Platformatic Open Source Software + + + + + +
+
Version: 0.42.0

API

Each table is mapped to an entity named after the table's name.

In the following reference we'll use some placeholders, but let's see an example

Example

Given this SQL executed against your database:

CREATE TABLE pages (
id SERIAL PRIMARY KEY,
title VARCHAR(255) NOT NULL,
body TEXT NOT NULL
);
  • [PLURAL_ENTITY_NAME] is pages
  • [SINGULAR_ENTITY_NAME] is page
  • [PRIMARY_KEY] is id
  • fields are id, title, body

GET and POST parameters

Some APIs need the GET method, where parameters must be defined in the URL, or POST/PUT methods, where parameters can be defined in the HTTP request payload.

Fields

Every API can define a fields parameter, representing the entity fields you want to get back for each row of the table. If not specified all fields are returned.

The fields parameter is always sent in the query string, even for POST, PUT and DELETE requests, as a comma separated value.

GET /[PLURAL_ENTITY_NAME]

Return all entities matching where clause

Where clause

You can define many WHERE clauses in REST API, each clause includes a field, an operator and a value.

The field is one of the fields found in the schema.

The operator follows this table:

Platformatic operatorSQL operator
eq'='
in'IN'
nin'NOT IN'
neq'<>'
gt'>'
gte'>='
lt'<'
lte'<='

The value is the value you want to compare the field to.

For GET requests all these clauses are specified in the query string using the format where.[FIELD].[OPERATOR]=[VALUE]

Example

If you want to get the title and the body of every page where id < 15 you can make an HTTP request like this:

$ curl -X 'GET' \
'http://localhost:3042/pages/?fields=body,title&where.id.lt=15' \
-H 'accept: application/json'

Where clause operations are by default combined with the AND operator. To create an OR condition use the where.or query param.

Each where.or query param can contain multiple conditions separated by a | (pipe).

The where.or conditions are similar to the where conditions, except that they don't have the where prefix.

Example

If you want to get the posts where counter = 10 OR counter > 30 you can make an HTTP request like this:

$ curl -X 'GET' \
'http://localhost:3042/pages/?where.or=(counter.eq=10|counter.gte=30)' \
-H 'accept: application/json'

OrderBy clause

You can define the ordering of the returned rows within your REST API calls with the orderby clause using the following pattern:

?orderby.[field]=[asc | desc]

The field is one of the fields found in the schema. The value can be asc or desc.

Example

If you want to get the pages ordered alphabetically by their titles you can make an HTTP request like this:

$ curl -X 'GET' \
'http://localhost:3042/pages?orderby.title=asc' \
-H 'accept: application/json'

Total Count

If totalCount boolean is true in query, the GET returns the total number of elements in the X-Total-Count header ignoring limit and offset (if specified).

$ curl -v -X 'GET' \
'http://localhost:3042/pages/?limit=2&offset=0&totalCount=true' \
-H 'accept: application/json'

(...)
> HTTP/1.1 200 OK
> x-total-count: 18
(...)

[{"id":1,"title":"Movie1"},{"id":2,"title":"Movie2"}]%

POST [PLURAL_ENTITY_NAME]

Creates a new row in table. Expects fields to be sent in a JSON formatted request body.

Example

$ curl -X 'POST' \
'http://localhost:3042/pages/' \
-H 'accept: application/json' \
-H 'Content-Type: application/json' \
-d '{
"title": "Hello World",
"body": "Welcome to Platformatic!"
}'

{
"id": 1,
"title": "Hello World",
"body": "Welcome to Platformatic"
}

GET [PLURAL_ENTITY_NAME]/[PRIMARY_KEY]

Returns a single row, identified by PRIMARY_KEY.

Example

$ curl -X 'GET' 'http://localhost:3042/pages/1?fields=title,body'

{
"title": "Hello World",
"body": "Welcome to Platformatic"
}

POST [PLURAL_ENTITY_NAME]/[PRIMARY_KEY]

Updates a row identified by PRIMARY_KEY.

Example

$ curl -X 'POST' \
'http://localhost:3042/pages/1' \
-H 'accept: application/json' \
-H 'Content-Type: application/json' \
-d '{
"title": "Hello Platformatic!",
"body": "Welcome to Platformatic!"
}'

{
"id": 1,
"title": "Hello Platformatic!",
"body": "Welcome to Platformatic"
}

PUT [PLURAL_ENTITY_NAME]/[PRIMARY_KEY]

Same as POST [PLURAL_ENTITY_NAME]/[PRIMARY_KEY].

PUT [PLURAL_ENTITY_NAME]

Updates all entities matching where clause

Example

$ curl -X 'PUT' \
'http://localhost:3042/pages?where.id.in=1,2' \
-H 'accept: application/json' \
-H 'Content-Type: application/json' \
-d '{
"title": "Updated title!",
"body": "Updated body!"
}'

[{
"id": 1,
"title": "Updated title!",
"body": "Updated body!"
},{
"id": 2,
"title": "Updated title!",
"body": "Updated body!"
}]

DELETE [PLURAL_ENTITY_NAME]/[PRIMARY_KEY]

Deletes a row identified by the PRIMARY_KEY.

Example

$ curl -X 'DELETE' 'http://localhost:3042/pages/1?fields=title'

{
"title": "Hello Platformatic!"
}

Nested Relationships

Let's consider the following SQL:

CREATE TABLE IF NOT EXISTS movies (
movie_id INTEGER PRIMARY KEY,
title TEXT NOT NULL
);
CREATE TABLE IF NOT EXISTS quotes (
id INTEGER PRIMARY KEY,
quote TEXT NOT NULL,
movie_id INTEGER NOT NULL REFERENCES movies(movie_id)
);

And:

  • [P_PARENT_ENTITY] is movies
  • [S_PARENT_ENTITY] is movie
  • [P_CHILDREN_ENTITY] is quotes
  • [S_CHILDREN_ENTITY] is quote

In this case, more APIs are available:

GET [P_PARENT_ENTITY]/[PARENT_PRIMARY_KEY]/[P_CHILDREN_ENTITY]

Given a 1-to-many relationship, where a parent entity can have many children, you can query for the children directly.

$ curl -X 'GET' 'http://localhost:3042/movies/1/quotes?fields=quote'

[
{
"quote": "I'll be back"
},
{
"quote": "Hasta la vista, baby"
}
]

GET [P_CHILDREN_ENTITY]/[CHILDREN_PRIMARY_KEY]/[S_PARENT_ENTITY]

You can query for the parent directly, e.g.:

$ curl -X 'GET' 'http://localhost:3042/quotes/1/movie?fields=title'

{
"title": "Terminator"
}

Many-to-Many Relationships

A many-to-many relationship lets you relate each row in one table to many rows in another table and vice versa.

Many-to-many relationships are implemented in SQL via a "join table", a table whose primary key is composed of the identifiers of the two parts of the many-to-many relationship.

Platformatic DB fully supports many-to-many relationships on all supported databases.

Let's consider the following SQL:

CREATE TABLE pages (
id INTEGER PRIMARY KEY,
the_title VARCHAR(42)
);

CREATE TABLE users (
id INTEGER PRIMARY KEY,
username VARCHAR(255) NOT NULL
);

CREATE TABLE editors (
page_id INTEGER NOT NULL,
user_id INTEGER NOT NULL,
role VARCHAR(255) NOT NULL,
CONSTRAINT fk_editor_pages FOREIGN KEY (page_id) REFERENCES pages(id),
CONSTRAINT fk_editor_users FOREIGN KEY (user_id) REFERENCES users(id),
PRIMARY KEY (page_id, user_id)
);

And:

  • [P_ENTITY] is editors
  • [P_REL_1] is pages
  • [S_REL_1] is page
  • [KEY_REL_1] is pages PRIMARY KEY: pages(id)
  • [P_REL_2] is users
  • [S_REL_2] is user
  • [KEY_REL_2] is users PRIMARY KEY: users(id)

In this case, these are the APIs available for the join table:

GET [P_ENTITY]/[S_REL_1]/[KEY_REL_1]/[S_REL_2]/[KEY_REL_2]

This returns the entity in the "join table", e.g. GET /editors/page/1/user/1.

POST [P_ENTITY]/[S_REL_1]/[KEY_REL_1]/[S_REL_2]/[KEY_REL_2]

Creates a new entity in the "join table", e.g. POST /editors/page/1/user/1.

PUT [P_ENTITY]/[S_REL_1]/[KEY_REL_1]/[S_REL_2]/[KEY_REL_2]

Updates an entity in the "join table", e.g. PUT /editors/page/1/user/1.

DELETE [P_ENTITY]/[S_REL_1]/[KEY_REL_1]/[S_REL_2]/[KEY_REL_2]

Delete the entity in the "join table", e.g. DELETE /editors/page/1/user/1.

GET /[P_ENTITY]

See the above.

Offset only accepts values >= 0. Otherwise an error is returned.

Pagination

Platformatic DB supports results pagination through the input parameters limit and offset.

Example

$ curl -X 'GET' 'http://localhost:3042/movies?limit=5&offset=10'

[
{
"title": "Star Wars",
"movie_id": 10
},
...
{
"title": "007",
"movie_id": 14
}
]

It returns 5 movies starting from position 10.

TotalCount functionality can be used in order to evaluate if there are more pages.

Limit

By default a limit value (10) is applied to each request.

Clients can override this behavior by passing a value. In this case the server validates the input, and an error is returned if it exceeds the maximum accepted value (100).

Limit's values can be customized through configuration:

{
...
"db": {
...
"limit": {
"default": 50,
"max": 1000
}
}
}

Limit only accepts values >= 0. Otherwise an error is returned.

Offset

By default, offset is not applied to the request. Clients can override this behavior by passing a value.

Offset only accepts values >= 0. Otherwise an error is returned.

+ + + + \ No newline at end of file diff --git a/docs/0.42.0/reference/sql-openapi/ignore/index.html b/docs/0.42.0/reference/sql-openapi/ignore/index.html new file mode 100644 index 00000000000..805293b78c6 --- /dev/null +++ b/docs/0.42.0/reference/sql-openapi/ignore/index.html @@ -0,0 +1,17 @@ + + + + + +Ignoring entities and fields | Platformatic Open Source Software + + + + + +
+
Version: 0.42.0

Ignoring entities and fields

@platformatic/sql-openapi allows you to selectively ignore entities and fields.

To ignore entities:

app.register(require('@platformatic/sql-openapi'), {
ignore: {
categories: true
}
})

To ignore individual fields:

app.register(require('@platformatic/sql-openapi'), {
ignore: {
categories: {
name: true
}
}
})
+ + + + \ No newline at end of file diff --git a/docs/0.42.0/reference/sql-openapi/introduction/index.html b/docs/0.42.0/reference/sql-openapi/introduction/index.html new file mode 100644 index 00000000000..8e5e900b8bc --- /dev/null +++ b/docs/0.42.0/reference/sql-openapi/introduction/index.html @@ -0,0 +1,17 @@ + + + + + +Introduction to the REST API | Platformatic Open Source Software + + + + + +
+
Version: 0.42.0

Introduction to the REST API

The Platformatic DB OpenAPI plugin automatically starts a REST API server (powered by Fastify) that provides CRUD (Create, Read, Update, Delete) functionality for each entity.

Configuration

In the config file, under the "db" section, the OpenAPI server is enabled by default. You can disable it by setting the openapi property to false.

Example

{
...
"db": {
"openapi": false
}
}

As Platformatic DB uses fastify-swagger under the hood, the "openapi" property can be an object that follows the OpenAPI Specification Object format.

This allows you to extend the output of the Swagger UI documentation.

+ + + + \ No newline at end of file diff --git a/docs/category/getting-started/index.html b/docs/category/getting-started/index.html new file mode 100644 index 00000000000..8db984c4548 --- /dev/null +++ b/docs/category/getting-started/index.html @@ -0,0 +1,17 @@ + + + + + +Getting Started | Platformatic Open Source Software + + + + + + + + + + \ No newline at end of file diff --git a/docs/category/guides/index.html b/docs/category/guides/index.html new file mode 100644 index 00000000000..d4172c22b2f --- /dev/null +++ b/docs/category/guides/index.html @@ -0,0 +1,17 @@ + + + + + +Guides | Platformatic Open Source Software + + + + + +
+
Version: 0.42.1

Guides

+ + + + \ No newline at end of file diff --git a/docs/category/packages/index.html b/docs/category/packages/index.html new file mode 100644 index 00000000000..a730037fc9d --- /dev/null +++ b/docs/category/packages/index.html @@ -0,0 +1,17 @@ + + + + + +Packages | Platformatic Open Source Software + + + + + + + + + + \ No newline at end of file diff --git a/docs/category/platformatic-cloud/index.html b/docs/category/platformatic-cloud/index.html new file mode 100644 index 00000000000..c0d312c122a --- /dev/null +++ b/docs/category/platformatic-cloud/index.html @@ -0,0 +1,17 @@ + + + + + +Platformatic Cloud | Platformatic Open Source Software + + + + + + + + + + \ No newline at end of file diff --git a/docs/category/reference/index.html b/docs/category/reference/index.html new file mode 100644 index 00000000000..01aa69a83eb --- /dev/null +++ b/docs/category/reference/index.html @@ -0,0 +1,17 @@ + + + + + +Reference | Platformatic Open Source Software + + + + + + + + + + \ No newline at end of file diff --git a/docs/contributing/documentation-style-guide/index.html b/docs/contributing/documentation-style-guide/index.html new file mode 100644 index 00000000000..1ede80a423f --- /dev/null +++ b/docs/contributing/documentation-style-guide/index.html @@ -0,0 +1,74 @@ + + + + + +Documentation Style Guide | Platformatic Open Source Software + + + + + +
+
Version: 0.42.1

Documentation Style Guide

Welcome to the Platformatic Documentation Style Guide. This guide is here to provide +you with a conventional writing style for users writing developer documentation on +our Open Source framework. Each topic is precise and well explained to help you write +documentation users can easily understand and implement.

Who is This Guide For?

This guide is for anyone who loves to build with Platformatic or wants to contribute +to our documentation. You do not need to be an expert in writing technical +documentation. This guide is here to help you.

Visit CONTRIBUTING.md +file on GitHub to join our Open Source folks.

Before you Write

You should have a basic understanding of:

  • JavaScript
  • Node.js
  • Git
  • GitHub
  • Markdown
  • HTTP
  • NPM

Consider Your Audience

Before you start writing, think about your audience. In this case, your audience +should already know HTTP, JavaScript, NPM, and Node.js. It is necessary to keep +your readers in mind because they are the ones consuming your content. You want +to give as much useful information as possible. Consider the vital things they +need to know and how they can understand them. Use words and references that +readers can relate to easily. Ask for feedback from the community, it can help +you write better documentation that focuses on the user and what you want to +achieve.

Get Straight to the Point

Give your readers a clear and precise action to take. Start with what is most +important. This way, you can help them find what they need faster. Mostly, +readers tend to read the first content on a page, and many will not scroll +further.

Example

Less like this:

Colons are very important to register a parametric path. It lets +the framework know there is a new parameter created. You can place the colon +before the parameter name so the parametric path can be created.

More Like this:

To register a parametric path, put a colon before the parameter +name. Using a colon lets the framework know it is a parametric path and not a +static path.

Images and Video Should Enhance the Written Documentation

Images and video should only be added if they complement the written +documentation, for example to help the reader form a clearer mental model of a +concept or pattern.

Images can be directly embedded, but videos should be included by linking to an +external site, such as YouTube. You can add links by using +[Title](https://www.websitename.com) in the Markdown.

Avoid Plagiarism

Make sure you avoid copying other people's work. Keep it as original as +possible. You can learn from what they have done and reference where it is from +if you used a particular quote from their work.

Word Choice

There are a few things you need to use and avoid when writing your documentation +to improve readability for readers and make documentation neat, direct, and +clean.

When to use the Second Person "you" as the Pronoun

When writing articles or guides, your content should communicate directly to +readers in the second person ("you") addressed form. It is easier to give them +direct instruction on what to do on a particular topic. To see an example, visit +the Quick Start Guide.

Example

Less like this:

We can use the following plugins.

More like this:

You can use the following plugins.

According to Wikipedia, You is usually a second person pronoun. +Also, used to refer to an indeterminate person, as a more common alternative +to a very formal indefinite pronoun.

To recap, use "you" when writing articles or guides.

When to Avoid the Second Person "you" as the Pronoun

One of the main rules of formal writing such as reference documentation, or API +documentation, is to avoid the second person ("you") or directly addressing the +reader.

Example

Less like this:

You can use the following recommendation as an example.

More like this:

As an example, the following recommendations should be +referenced.

To view a live example, refer to the Decorators +reference document.

To recap, avoid "you" in reference documentation or API documentation.

Avoid Using Contractions

Contractions are the shortened version of written and spoken forms of a word, +i.e. using "don't" instead of "do not". Avoid contractions to provide a more +formal tone.

Avoid Using Condescending Terms

Condescending terms are words that include:

  • Just
  • Easy
  • Simply
  • Basically
  • Obviously

The reader may not find it easy to use Platformatic; avoid +words that make it sound simple, easy, offensive, or insensitive. Not everyone +who reads the documentation has the same level of understanding.

Starting With a Verb

Mostly start your description with a verb, which makes it simple and precise for +the reader to follow. Prefer using present tense because it is easier to read +and understand than the past or future tense.

Example

Less like this:

There is a need for Node.js to be installed before you can be +able to use Platformatic.

More like this:

Install Node.js to make use of Platformatic.

Grammatical Moods

Grammatical moods are a great way to express your writing. Avoid sounding too +bossy while making a direct statement. Know when to switch between indicative, +imperative, and subjunctive moods.

Indicative - Use when making a factual statement or question.

Example

Since there is no testing framework available, "Platformatic recommends ways +to write tests".

Imperative - Use when giving instructions, actions, commands, or when you +write your headings.

Example

Install dependencies before starting development.

Subjunctive - Use when making suggestions, hypotheses, or non-factual +statements.

Example

Reading the documentation on our website is recommended to get +comprehensive knowledge of the framework.

Use Active Voice Instead of Passive

Using active voice is a more compact and direct way of conveying your +documentation.

Example

Passive:

The node dependencies and packages are installed by npm.

Active:

npm installs packages and node dependencies.

Writing Style

Documentation Titles

When creating a new guide, API, or reference in the /docs/ directory, use +short titles that best describe the topic of your documentation. Name your files +in kebab-cases and avoid Raw or camelCase. To learn more about kebab-case you +can visit this medium article on Case +Styles.

Examples:

hook-and-plugins.md

adding-test-plugins.md

removing-requests.md

Hyperlinks should have a clear title of what it references. Here is how your +hyperlink should look:

<!-- More like this -->

// Add clear & brief description
[Fastify Plugins] (https://www.fastify.io/docs/latest/Plugins/)

<!--Less like this -->

// incomplete description
[Fastify] (https://www.fastify.io/docs/latest/Plugins/)

// Adding title in link brackets
[](https://www.fastify.io/docs/latest/Plugins/ "fastify plugin")

// Empty title
[](https://www.fastify.io/docs/latest/Plugins/)

// Adding links localhost URLs instead of using code strings (``)
[http://localhost:3000/](http://localhost:3000/)

Include in your documentation as many essential references as possible, but +avoid having numerous links when writing to avoid distractions.

+ + + + \ No newline at end of file diff --git a/docs/contributing/index.html b/docs/contributing/index.html new file mode 100644 index 00000000000..9ecceda577e --- /dev/null +++ b/docs/contributing/index.html @@ -0,0 +1,18 @@ + + + + + +Contributing | Platformatic Open Source Software + + + + + + + + + + \ No newline at end of file diff --git a/docs/getting-started/architecture/index.html b/docs/getting-started/architecture/index.html new file mode 100644 index 00000000000..76a3bfc4638 --- /dev/null +++ b/docs/getting-started/architecture/index.html @@ -0,0 +1,25 @@ + + + + + +Architecture | Platformatic Open Source Software + + + + + +
+
Version: 0.42.1

Architecture

Platformatic is a collection of Open Source tools designed to eliminate friction +in backend development. The first of those tools is Platformatic DB, which is developed +as @platformatic/db.

Platformatic DB

Platformatic DB can expose a SQL database by dynamically mapping it to REST/OpenAPI +and GraphQL endpoints. It supports a limited subset of the SQL query language, but +also allows developers to add their own custom routes and resolvers.

Platformatic DB Architecture

Platformatic DB is composed of a few key libraries:

  1. @platformatic/sql-mapper - follows the Data Mapper pattern to build an API on top of a SQL database. Internally it uses the @databases project.
  2. @platformatic/sql-openapi - uses sql-mapper to create a series of REST routes and matching OpenAPI definitions. +Internally it uses @fastify/swagger.
  3. @platformatic/sql-graphql - uses sql-mapper to create a GraphQL endpoint and schema. sql-graphql also support Federation. +Internally it uses mercurius.

Platformatic DB allows you to load a Fastify plugin during server startup that contains your own application-specific code. +The plugin can add more routes or resolvers — these will automatically be shown in the OpenAPI and GraphQL schemas.

SQL database migrations are also supported. They're implemented internally with the postgrator library.

+ + + + \ No newline at end of file diff --git a/docs/getting-started/movie-quotes-app-tutorial/index.html b/docs/getting-started/movie-quotes-app-tutorial/index.html new file mode 100644 index 00000000000..05477bed851 --- /dev/null +++ b/docs/getting-started/movie-quotes-app-tutorial/index.html @@ -0,0 +1,129 @@ + + + + + +Movie Quotes App Tutorial | Platformatic Open Source Software + + + + + +
+
Version: 0.42.1

Movie Quotes App Tutorial

This tutorial will help you learn how to build a full stack application on top +of Platformatic DB. We're going to build an application that allows us to +save our favourite movie quotes. We'll also be building in custom API functionality +that allows for some neat user interaction on our frontend.

You can find the complete code for the application that we're going to build +on GitHub.

note

We'll be building the frontend of our application with the Astro +framework, but the GraphQL API integration steps that we're going to cover can +be applied with most frontend frameworks.

What we're going to cover

In this tutorial we'll learn how to:

  • Create a Platformatic API
  • Apply database migrations
  • Create relationships between our API entities
  • Populate our database tables
  • Build a frontend application that integrates with our GraphQL API
  • Extend our API with custom functionality
  • Enable CORS on our Platformatic API

Prerequisites

To follow along with this tutorial you'll need to have these things installed:

You'll also need to have some experience with JavaScript, and be comfortable with +running commands in a terminal.

Build the backend

Create a Platformatic API

First, let's create our project directory:

mkdir -p tutorial-movie-quotes-app/apps/movie-quotes-api/

cd tutorial-movie-quotes-app/apps/movie-quotes-api/

Run this command in your terminal to start the Platformatic creator wizard:

npm create platformatic@latest

This interactive command-line tool will ask you some questions about how you'd +like to set up your new Platformatic project. For this guide, select these options:

- Which kind of project do you want to create?  => DB
- Where would you like to create your project? => quick-start
- Do you want to create default migrations? => Yes
- Do you want to create a plugin? => Yes
- Do you want to use TypeScript? => No
- Do you want to install dependencies? => Yes (this can take a while)
- Do you want to apply the migrations? => Yes
- Do you want to generate types? => Yes
- Do you want to create the github action to deploy this application to Platformatic Cloud dynamic workspace? => No
- Do you want to create the github action to deploy this application to Platformatic Cloud static workspace? => No

Once the wizard is complete, you'll have a Platformatic app project in the +folder quick-start, with example migration files and a plugin script.

info

Make sure you run the npm/yarn/pnpm command install command manually if you +don't ask the wizard to do it for you.

Define the database schema

Let's create a new directory to store our migration files:

mkdir migrations

Then we'll create a migration file named 001.do.sql in the migrations +directory:

CREATE TABLE quotes (
id INTEGER PRIMARY KEY,
quote TEXT NOT NULL,
said_by VARCHAR(255) NOT NULL,
created_at DATETIME DEFAULT CURRENT_TIMESTAMP
);

Now let's setup migrations in our Platformatic configuration +file, platformatic.db.json:

{
"$schema": "https://platformatic.dev/schemas/v0.23.2/db",
"server": {
"hostname": "{PLT_SERVER_HOSTNAME}",
"port": "{PORT}",
"logger": {
"level": "{PLT_SERVER_LOGGER_LEVEL}"
}
},
"db": {
"connectionString": "{DATABASE_URL}",
"graphql": true,
"openapi": true
},
"plugins": {
"paths": [
"plugin.js"
]
},
"types": {
"autogenerate": true
},
"migrations": {
"dir": "migrations",
"autoApply": true
}
}
info

Take a look at the Configuration reference +to see all the supported configuration settings.

Now we can start the Platformatic DB server:

npm run start

Our Platformatic DB server should start, and we'll see messages like these:

[11:26:48.772] INFO (15235): running 001.do.sql
[11:26:48.864] INFO (15235): server listening
url: "http://127.0.0.1:3042"

Let's open a new terminal and make a request to our server's REST API that +creates a new quote:

curl --request POST --header "Content-Type: application/json" \
-d "{ \"quote\": \"Toto, I've got a feeling we're not in Kansas anymore.\", \"saidBy\": \"Dorothy Gale\" }" \
http://localhost:3042/quotes

We should receive a response like this from the API:

{"id":1,"quote":"Toto, I've got a feeling we're not in Kansas anymore.","saidBy":"Dorothy Gale","createdAt":"1684167422600"}

Create an entity relationship

Now let's create a migration file named 002.do.sql in the migrations +directory:

CREATE TABLE movies (
id INTEGER PRIMARY KEY,
name TEXT NOT NULL UNIQUE
);

ALTER TABLE quotes ADD COLUMN movie_id INTEGER REFERENCES movies(id);

This SQL will create a new movies database table and also add a movie_id +column to the quotes table. This will allow us to store movie data in the +movies table and then reference them by ID in our quotes table.

Let's stop the Platformatic DB server with Ctrl + C, and then start it again:

npm run start

The new migration should be automatically applied and we'll see the log message +running 002.do.sql.

Our Platformatic DB server also provides a GraphQL API. Let's open up the GraphiQL +application in our web browser:

http://localhost:3042/graphiql

Now let's run this query with GraphiQL to add the movie for the quote that we +added earlier:

mutation {
saveMovie(input: { name: "The Wizard of Oz" }) {
id
}
}

We should receive a response like this from the API:

{
"data": {
"saveMovie": {
"id": "1"
}
}
}

Now we can update our quote to reference the movie:

mutation {
saveQuote(input: { id: 1, movieId: 1 }) {
id
quote
saidBy
createdAt
movie {
id
name
}
}
}

We should receive a response like this from the API:

{
"data": {
"saveQuote": {
"id": "1",
"quote": "Toto, I've got a feeling we're not in Kansas anymore.",
"saidBy": "Dorothy Gale",
"movie": {
"id": "1",
"name": "The Wizard of Oz"
}
}
}
}

Our Platformatic DB server has automatically identified the relationship +between our quotes and movies database tables. This allows us to make +GraphQL queries that retrieve quotes and their associated movies at the same +time. For example, to retrieve all quotes from our database we can run:

query {
quotes {
id
quote
saidBy
createdAt
movie {
id
name
}
}
}

To view the GraphQL schema that's generated for our API by Platformatic DB, +we can run this command in our terminal:

npx platformatic db schema graphql

The GraphQL schema shows all of the queries and mutations that we can run +against our GraphQL API, as well as the types of data that it expects as input.

Populate the database

Our movie quotes database is looking a little empty! We're going to create a +"seed" script to populate it with some data.

Let's create a new file named seed.js and copy and paste in this code:

'use strict'

const quotes = [
{
quote: "Toto, I've got a feeling we're not in Kansas anymore.",
saidBy: 'Dorothy Gale',
movie: 'The Wizard of Oz'
},
{
quote: "You're gonna need a bigger boat.",
saidBy: 'Martin Brody',
movie: 'Jaws'
},
{
quote: 'May the Force be with you.',
saidBy: 'Han Solo',
movie: 'Star Wars'
},
{
quote: 'I have always depended on the kindness of strangers.',
saidBy: 'Blanche DuBois',
movie: 'A Streetcar Named Desire'
}
]

module.exports = async function ({ entities, db, sql }) {
for (const values of quotes) {
const movie = await entities.movie.save({ input: { name: values.movie } })

console.log('Created movie:', movie)

const quote = {
quote: values.quote,
saidBy: values.saidBy,
movieId: movie.id
}

await entities.quote.save({ input: quote })

console.log('Created quote:', quote)
}
}
info

Take a look at the Seed a Database guide to learn more +about how database seeding works with Platformatic DB.

Let's stop our Platformatic DB server running and remove our SQLite database:

rm db.sqlite

Now let's create a fresh SQLite database by running our migrations:

npx platformatic db migrations apply

And then let's populate the quotes and movies tables with data using our +seed script:

npx platformatic db seed seed.js

Our database is full of data, but we don't have anywhere to display it. It's +time to start building our frontend!

Build the frontend

We're now going to use Astro to build our frontend +application. If you've not used it before, you might find it helpful +to read this overview +on how Astro components are structured.

tip

Astro provide some extensions and tools to help improve your +Editor Setup when building an +Astro application.

Create an Astro application

In the root tutorial-movie-quotes-app directory of our project, let's create a new directory for our frontend application:

mkdir -p apps/movie-quotes-frontend/

cd apps/movie-quotes-frontend/

And then we'll create a new Astro project:

npm create astro@latest -- --template basics

It will ask you some questions about how you'd like to set up +your new Astro project. For this guide, select these options:

Where should we create your new project?

   .
◼ tmpl Using basics as project template
✔ Template copied

Install dependencies? (it's buggy, we'll do it afterwards)

   No
◼ No problem! Remember to install dependencies after setup.

Do you plan to write TypeScript?

   No
◼ No worries! TypeScript is supported in Astro by default, but you are free to continue writing JavaScript instead.

Initialize a new git repository?

   No
◼ Sounds good! You can always run git init manually.

Liftoff confirmed. Explore your project!
Run npm dev to start the dev server. CTRL+C to stop.
Add frameworks like react or tailwind using astro add.

Now we'll edit our Astro configuration file, astro.config.mjs and +copy and paste in this code:

import { defineConfig } from 'astro/config'

// https://astro.build/config
export default defineConfig({
output: 'server'
})

And we'll also edit our tsconfig.json file and add in this configuration:

{
"extends": "astro/tsconfigs/base",
"compilerOptions": {
"types": ["astro/client"]
}
}

Now we can start up the Astro development server with:

npm run dev

And then load up the frontend in our browser at http://localhost:3000

Now that everything is working, we'll remove all default *.astro files from the src/ directory, but we'll keep the directory structure. You can delete them now, or override them later.

Create a layout

In the src/layouts directory, let's create a new file named Layout.astro:

---
export interface Props {
title: string;
page?: string;
}
const { title, page } = Astro.props;
---

<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8" />
<meta name="viewport" content="width=device-width" />
<title>{title}</title>
</head>
<body>
<header>
<h1>🎬 Movie Quotes</h1>
</header>
<nav>
<a href="/">All quotes</a>
</nav>
<section>
<slot />
</section>
</body>
</html>

The code between the --- is known as the component script, and the +code after that is the component template. The component script will only run +on the server side when a web browser makes a request. The component template +is rendered server side and sent back as an HTML response to the web browser.

Now we'll update src/pages/index.astro to use this Layout component. +Let's replace the contents of src/pages/index.astro with this code:

---
import Layout from '../layouts/Layout.astro';
---

<Layout title="All quotes" page="listing">
<main>
<p>We'll list all the movie quotes here.</p>
</main>
</Layout>

Integrate the urql GraphQL client

We're now going to integrate the URQL +GraphQL client into our frontend application. This will allow us to run queries +and mutations against our Platformatic GraphQL API.

Let's first install @urql/core and +graphql as project dependencies:

npm install @urql/core graphql

Then let's create a new .env file and add this configuration:

PUBLIC_GRAPHQL_API_ENDPOINT=http://127.0.0.1:3042/graphql

Now we'll create a new directory:

mkdir src/lib

And then create a new file named src/lib/quotes-api.js. In that file we'll +create a new URQL client:

// src/lib/quotes-api.js

import { createClient, cacheExchange, fetchExchange } from '@urql/core';

const graphqlClient = createClient({
url: import.meta.env.PUBLIC_GRAPHQL_API_ENDPOINT,
requestPolicy: "network-only",
exchanges: [cacheExchange, fetchExchange]
});

We'll also add a thin wrapper around the client that does some basic error +handling for us:

// src/lib/quotes-api.js

async function graphqlClientWrapper(method, gqlQuery, queryVariables = {}) {
const queryResult = await graphqlClient[method](
gqlQuery,
queryVariables
).toPromise();

if (queryResult.error) {
console.error("GraphQL error:", queryResult.error);
}

return {
data: queryResult.data,
error: queryResult.error,
};
}

export const quotesApi = {
async query(gqlQuery, queryVariables = {}) {
return await graphqlClientWrapper("query", gqlQuery, queryVariables);
},
async mutation(gqlQuery, queryVariables = {}) {
return await graphqlClientWrapper("mutation", gqlQuery, queryVariables);
}
}

And lastly, we'll export gql from the @urql/core package, to make it +simpler for us to write GraphQL queries in our pages:

// src/lib/quotes-api.js

export { gql } from "@urql/core";

Stop the Astro dev server and then start it again so it picks up the .env +file:

npm run dev

Display all quotes

Let's display all the movie quotes in src/pages/index.astro.

First, we'll update the component script at the top and add in a query to +our GraphQL API for quotes:

---
import Layout from '../layouts/Layout.astro';
import { quotesApi, gql } from '../lib/quotes-api';

const { data } = await quotesApi.query(gql`
query {
quotes {
id
quote
saidBy
createdAt
movie {
id
name
}
}
}
`);

const quotes = data?.quotes || [];
---

Then we'll update the component template to display the quotes:

<Layout title="All quotes" page="listing">
<main>
{quotes.length > 0 ? quotes.map((quote) => (
<div>
<blockquote>
<p>{quote.quote}</p>
</blockquote>
<p>
{quote.saidBy}, {quote.movie?.name}
</p>
<div>
<span>Added {new Date(Number(quote.createdAt)).toUTCString()}</span>
</div>
</div>
)) : (
<p>No movie quotes have been added.</p>
)}
</main>
</Layout>

And just like that, we have all the movie quotes displaying on the page!

Integrate Tailwind for styling

Automatically add the @astrojs/tailwind integration:

npx astro add tailwind --yes

Add the Tailwind CSS Typography +and Forms plugins:

npm install --save-dev @tailwindcss/typography @tailwindcss/forms

Import the plugins in our Tailwind configuration file:

// tailwind.config.cjs

/** @type {import('tailwindcss').Config} */
module.exports = {
content: ['./src/**/*.{astro,html,js,jsx,md,mdx,svelte,ts,tsx,vue}'],
theme: {
extend: {}
},
plugins: [
require('@tailwindcss/forms'),
require('@tailwindcss/typography')
]
}

Stop the Astro dev server and then start it again so it picks up all the +configuration changes:

npm run dev

Style the listing page

To style our listing page, let's add CSS classes to the component template in +src/layouts/Layout.astro:

---
export interface Props {
title: string;
page?: string;
}

const { title, page } = Astro.props;

const navActiveClasses = "font-bold bg-yellow-400 no-underline";
---

<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8" />
<meta name="viewport" content="width=device-width" />
<title>{title}</title>
</head>
<body class="py-8">
<header class="prose mx-auto mb-6">
<h1>🎬 Movie Quotes</h1>
</header>
<nav class="prose mx-auto mb-6 border-y border-gray-200 flex">
<a href="/" class={`p-3 ${page === "listing" && navActiveClasses}`}>All quotes</a>
</nav>
<section class="prose mx-auto">
<slot />
</section>
</body>
</html>

Then let's add CSS classes to the component template in src/pages/index.astro:

<Layout title="All quotes">
<main>
{quotes.length > 0 ? quotes.map((quote) => (
<div class="border-b mb-6">
<blockquote class="text-2xl mb-0">
<p class="mb-4">{quote.quote}</p>
</blockquote>
<p class="text-xl mt-0 mb-8 text-gray-400">
{quote.saidBy}, {quote.movie?.name}
</p>
<div class="flex flex-col mb-6 text-gray-400">
<span class="text-gray-400 italic">Added {new Date(Number(quote.createdAt)).toUTCString()}</span>
</div>
</div>
)) : (
<p>No movie quotes have been added.</p>
)}
</main>
</Layout>

Our listing page is now looking much more user friendly!

Create an add quote page

We're going to create a form component that we can use for adding and editing +quotes.

First let's create a new component file, src/components/QuoteForm.astro:

---
export interface QuoteFormData {
id?: number;
quote?: string;
saidBy?: string;
movie?: string;
}

export interface Props {
action: string;
values?: QuoteFormData;
saveError?: boolean;
loadError?: boolean;
submitLabel: string;
}

const { action, values = {}, saveError, loadError, submitLabel } = Astro.props;
---

{saveError && <p class="text-lg bg-red-200 p-4">There was an error saving the quote. Please try again.</p>}
{loadError && <p class="text-lg bg-red-200 p-4">There was an error loading the quote. Please try again.</p>}

<form method="post" action={action} class="grid grid-cols-1 gap-6">
<label for="quote" class="block">
<span>Quote</span>
<textarea id="quote" name="quote" required="required" class="mt-1 w-full">{values.quote}</textarea>
</label>
<label for="said-by" class="block">
<span>Said by</span>
<input type="text" id="said-by" name="saidBy" required="required" value={values.saidBy} class="mt-1 w-full">
</label>
<label for="movie" class="block">
<span>Movie</span>
<input type="text" id="movie" name="movie" required="required" autocomplete="off" value={values.movie} class="form-input mt-1 w-full">
</label>
<input type="submit" value={submitLabel} disabled={loadError && "disabled"} class="bg-yellow-400 hover:bg-yellow-500 text-gray-900 round p-3" />
</form>

Create a new page file, src/pages/add.astro:

---
import Layout from '../layouts/Layout.astro';
import QuoteForm from '../components/QuoteForm.astro';
import type { QuoteFormData } from '../components/QuoteForm.astro';

let formData: QuoteFormData = {};
let saveError = false;
---

<Layout title="Add a movie quote" page="add">
<main>
<h2>Add a quote</h2>
<QuoteForm action="/add" values={formData} saveError={saveError} submitLabel="Add quote" />
</main>
</Layout>

And now let's add a link to this page in the layout navigation in src/layouts/Layout.astro:

<nav class="prose mx-auto mb-6 border-y border-gray-200 flex">
<a href="/" class={`p-3 ${page === "listing" && navActiveClasses}`}>All quotes</a>
<a href="/add" class={`p-3 ${page === "add" && navActiveClasses}`}>Add a quote</a>
</nav>

Send form data to the API

When a user submits the add quote form we want to send the form data to our API +so it can then save it to our database. Let's wire that up now.

First we're going to create a new file, src/lib/request-utils.js:

export function isPostRequest (request) {
return request.method === 'POST'
}

export async function getFormData (request) {
const formData = await request.formData()

return Object.fromEntries(formData.entries())
}

Then let's update the component script in src/pages/add.astro to use +these new request utility functions:

---
import Layout from '../layouts/Layout.astro';
import QuoteForm from '../components/QuoteForm.astro';
import type { QuoteFormData } from '../components/QuoteForm.astro';

import { isPostRequest, getFormData } from '../lib/request-utils';

let formData: QuoteFormData = {};
let saveError = false;

if (isPostRequest(Astro.request)) {
formData = await getFormData(Astro.request);
}
---

When we create a new quote entity record via our API, we need to include a +movieId field that references a movie entity record. This means that when a +user submits the add quote form we need to:

  • Check if a movie entity record already exists with that movie name
  • Return the movie id if it does exist
  • If it doesn't exist, create a new movie entity record and return the movie ID

Let's update the import statement at the top of src/lib/quotes-api.js

-import { createClient } from '@urql/core'
+import { createClient, gql } from '@urql/core'

And then add a new method that will return a movie ID for us:

async function getMovieId (movieName) {
movieName = movieName.trim()

let movieId = null

// Check if a movie already exists with the provided name.
const queryMoviesResult = await quotesApi.query(
gql`
query ($movieName: String!) {
movies(where: { name: { eq: $movieName } }) {
id
}
}
`,
{ movieName }
)

if (queryMoviesResult.error) {
return null
}

const movieExists = queryMoviesResult.data?.movies.length === 1
if (movieExists) {
movieId = queryMoviesResult.data.movies[0].id
} else {
// Create a new movie entity record.
const saveMovieResult = await quotesApi.mutation(
gql`
mutation ($movieName: String!) {
saveMovie(input: { name: $movieName }) {
id
}
}
`,
{ movieName }
)

if (saveMovieResult.error) {
return null
}

movieId = saveMovieResult.data?.saveMovie.id
}

return movieId
}

And let's export it too:

export const quotesApi = {
async query (gqlQuery, queryVariables = {}) {
return await graphqlClientWrapper('query', gqlQuery, queryVariables)
},
async mutation (gqlQuery, queryVariables = {}) {
return await graphqlClientWrapper('mutation', gqlQuery, queryVariables)
},
getMovieId
}

Now we can wire up the last parts in the src/pages/add.astro component +script:

---
import Layout from '../layouts/Layout.astro';
import QuoteForm from '../components/QuoteForm.astro';
import type { QuoteFormData } from '../components/QuoteForm.astro';

import { quotesApi, gql } from '../lib/quotes-api';
import { isPostRequest, getFormData } from '../lib/request-utils';

let formData: QuoteFormData = {};
let saveError = false;

if (isPostRequest(Astro.request)) {
formData = await getFormData(Astro.request);

const movieId = await quotesApi.getMovieId(formData.movie);

if (movieId) {
const quote = {
quote: formData.quote,
saidBy: formData.saidBy,
movieId,
};

const { error } = await quotesApi.mutation(gql`
mutation($quote: QuoteInput!) {
saveQuote(input: $quote) {
id
}
}
`, { quote });

if (!error) {
return Astro.redirect('/');
} else {
saveError = true;
}
} else {
saveError = true;
}
}

Add autosuggest for movies

We can create a better experience for our users by autosuggesting the movie name +when they're adding a new quote.

Let's open up src/components/QuoteForm.astro and import our API helper methods +in the component script:

import { quotesApi, gql } from '../lib/quotes-api.js';

Then let's add in a query to our GraphQL API for all movies:

const { data } = await quotesApi.query(gql`
query {
movies {
name
}
}
`);

const movies = data?.movies || [];

Now let's update the Movie field in the component template to use the +array of movies that we've retrieved from the API:

<label for="movie" class="block">
<span>Movie</span>
<input list="movies" id="movie" name="movie" required="required" autocomplete="off" value={values.movie} class="form-input mt-1 w-full">
<datalist id="movies">
{movies.map(({ name }) => (
<option>{name}</option>
))}
</datalist>
</label>

Create an edit quote page

Let's create a new directory, src/pages/edit/:

mkdir src/pages/edit/

And inside of it, let's create a new page, [id].astro:

---
import Layout from '../../layouts/Layout.astro';
import QuoteForm, { QuoteFormData } from '../../components/QuoteForm.astro';

const id = Number(Astro.params.id);

let formValues: QuoteFormData = {};
let loadError = false;
let saveError = false;
---

<Layout title="Edit movie quote">
<main>
<h2>Edit quote</h2>
<QuoteForm action={`/edit/${id}`} values={formValues} saveError={saveError} loadError={loadError} submitLabel="Update quote" />
</main>
</Layout>

You'll see that we're using the same QuoteForm component that our add quote +page uses. Now we're going to wire up our edit page so that it can load an +existing quote from our API and save changes back to the API when the form is +submitted.

In the [id].astro component script, let's add some code to take care of +these tasks:

---
import Layout from '../../layouts/Layout.astro';
import QuoteForm, { QuoteFormData } from '../../components/QuoteForm.astro';

import { quotesApi, gql } from '../../lib/quotes-api';
import { isPostRequest, getFormData } from '../../lib/request-utils';

const id = Number(Astro.params.id);

let formValues: QuoteFormData = {};
let loadError = false;
let saveError = false;

if (isPostRequest(Astro.request)) {
const formData = await getFormData(Astro.request);
formValues = formData;

const movieId = await quotesApi.getMovieId(formData.movie);

if (movieId) {
const quote = {
id,
quote: formData.quote,
saidBy: formData.saidBy,
movieId,
};

const { error } = await quotesApi.mutation(gql`
mutation($quote: QuoteInput!) {
saveQuote(input: $quote) {
id
}
}
`, { quote });

if (!error) {
return Astro.redirect('/');
} else {
saveError = true;
}
} else {
saveError = true;
}
} else {
const { data } = await quotesApi.query(gql`
query($id: ID!) {
getQuoteById(id: $id) {
id
quote
saidBy
movie {
id
name
}
}
}
`, { id });

if (data?.getQuoteById) {
formValues = {
...data.getQuoteById,
movie: data.getQuoteById.movie.name
};
} else {
loadError = true;
}
}
---

Load up http://localhost:3000/edit/1 in your +browser to test out the edit quote page.

Now we're going to add edit links to the quotes listing page. Let's start by +creating a new component src/components/QuoteActionEdit.astro:

---
export interface Props {
id: number;
}

const { id } = Astro.props;
---
<a href={`/edit/${id}`} class="flex items-center mr-5 text-gray-400 hover:text-yellow-600 underline decoration-yellow-600 decoration-2 underline-offset-4">
<svg class="w-6 h-6 mr-1" xmlns="http://www.w3.org/2000/svg" viewBox="0 0 24 24" fill="currentColor" class="w-6 h-6">
<path d="M21.731 2.269a2.625 2.625 0 00-3.712 0l-1.157 1.157 3.712 3.712 1.157-1.157a2.625 2.625 0 000-3.712zM19.513 8.199l-3.712-3.712-8.4 8.4a5.25 5.25 0 00-1.32 2.214l-.8 2.685a.75.75 0 00.933.933l2.685-.8a5.25 5.25 0 002.214-1.32l8.4-8.4z" />
<path d="M5.25 5.25a3 3 0 00-3 3v10.5a3 3 0 003 3h10.5a3 3 0 003-3V13.5a.75.75 0 00-1.5 0v5.25a1.5 1.5 0 01-1.5 1.5H5.25a1.5 1.5 0 01-1.5-1.5V8.25a1.5 1.5 0 011.5-1.5h5.25a.75.75 0 000-1.5H5.25z" />
</svg>
<span class="hover:underline hover:decoration-yellow-600">Edit</span>
</a>

Then let's import this component and use it in our listing page, +src/pages/index.astro:

---
import Layout from '../layouts/Layout.astro';
import QuoteActionEdit from '../components/QuoteActionEdit.astro';
import { quotesApi, gql } from '../lib/quotes-api';

// ...
---

<Layout title="All quotes" page="listing">
<main>
{quotes.length > 0 ? quotes.map((quote) => (
<div class="border-b mb-6">
...
<div class="flex flex-col mb-6 text-gray-400">
<span class="flex items-center">
<QuoteActionEdit id={quote.id} />
</span>
<span class="mt-4 text-gray-400 italic">Added {new Date(Number(quote.createdAt)).toUTCString()}</span>
</div>
</div>
)) : (
<p>No movie quotes have been added.</p>
)}
</main>
</Layout>

Add delete quote functionality

Our Movie Quotes app can create, retrieve and update quotes. Now we're going +to implement the D in CRUD — delete!

First let's create a new component, src/components/QuoteActionDelete.astro:

---
export interface Props {
id: number;
}

const { id } = Astro.props;
---
<form method="POST" action={`/delete/${id}`} class="form-delete-quote m-0">
<button type="submit" class="flex items-center text-gray-400 hover:text-red-700 underline decoration-red-700 decoration-2 underline-offset-4">
<svg class="w-6 h-6 mr-1" xmlns="http://www.w3.org/2000/svg" viewBox="0 0 24 24" fill="currentColor" class="w-6 h-6">
<path fill-rule="evenodd" d="M12 2.25c-5.385 0-9.75 4.365-9.75 9.75s4.365 9.75 9.75 9.75 9.75-4.365 9.75-9.75S17.385 2.25 12 2.25zm-1.72 6.97a.75.75 0 10-1.06 1.06L10.94 12l-1.72 1.72a.75.75 0 101.06 1.06L12 13.06l1.72 1.72a.75.75 0 101.06-1.06L13.06 12l1.72-1.72a.75.75 0 10-1.06-1.06L12 10.94l-1.72-1.72z" clip-rule="evenodd" />
</svg>
<span>Delete</span>
</button>
</form>

And then we'll drop it into our listing page, src/pages/index.astro:

---
import Layout from '../layouts/Layout.astro';
import QuoteActionEdit from '../components/QuoteActionEdit.astro';
import QuoteActionDelete from '../components/QuoteActionDelete.astro';
import { quotesApi, gql } from '../lib/quotes-api';

// ...
---

<Layout title="All quotes" page="listing">
<main>
{quotes.length > 0 ? quotes.map((quote) => (
<div class="border-b mb-6">
...
<div class="flex flex-col mb-6 text-gray-400">
<span class="flex items-center">
<QuoteActionEdit id={quote.id} />
<QuoteActionDelete id={quote.id} />
</span>
<span class="mt-4 text-gray-400 italic">Added {new Date(Number(quote.createdAt)).toUTCString()}</span>
</div>
</div>
...

At the moment when a delete form is submitted from our listing page, we get +an Astro 404 page. Let's fix this by creating a new directory, src/pages/delete/:

mkdir src/pages/delete/

And inside of it, let's create a new page, [id].astro:

---
import Layout from '../../layouts/Layout.astro';

import { quotesApi, gql } from '../../lib/quotes-api';
import { isPostRequest } from '../../lib/request-utils';

if (isPostRequest(Astro.request)) {
const id = Number(Astro.params.id);

const { error } = await quotesApi.mutation(gql`
mutation($id: ID!) {
deleteQuotes(where: { id: { eq: $id }}) {
id
}
}
`, { id });

if (!error) {
return Astro.redirect('/');
}
}
---
<Layout title="Delete movie quote">
<main>
<h2>Delete quote</h2>
<p class="text-lg bg-red-200 p-4">There was an error deleting the quote. Please try again.</p>
</main>
</Layout>

Now if we click on a delete quote button on our listings page, it should call our +GraphQL API to delete the quote. To make this a little more user friendly, let's +add in a confirmation dialog so that users don't delete a quote by accident.

Let's create a new directory, src/scripts/:

mkdir src/scripts/

And inside of that directory let's create a new file, quote-actions.js:

// src/scripts/quote-actions.js

export function confirmDeleteQuote (form) {
if (confirm('Are you sure want to delete this quote?')) {
form.submit()
}
}

Then we can pull it in as client side JavaScript on our listing page, +src/pages/index.astro:

<Layout>
...
</Layout>

<script>
import { confirmDeleteQuote } from '../scripts/quote-actions.js'

addEventListener('DOMContentLoaded', () => {
document.querySelectorAll('.form-delete-quote').forEach((deleteForm) => {
deleteForm.addEventListener('submit', (event) => {
event.preventDefault()
confirmDeleteQuote(event.currentTarget)
})
})
})
</script>

Build a "like" quote feature

We've built all the basic CRUD (Create, Retrieve, Update & Delete) features +into our application. Now let's build a feature so that users can interact +and "like" their favourite movie quotes.

To build this feature we're going to add custom functionality to our API +and then add a new component, along with some client side JavaScript, to +our frontend.

Create an API migration

We're now going to work on the code for the API, under the apps/movie-quotes-api +directory.

First let's create a migration that adds a likes column to our quotes +database table. We'll create a new migration file, migrations/003.do.sql:

ALTER TABLE quotes ADD COLUMN likes INTEGER default 0;

This migration will automatically be applied when we next start our Platformatic +API.

Create an API plugin

To add custom functionality to our Platformatic API, we need to create a +Fastify plugin and +update our API configuration to use it.

Let's create a new file, plugin.js, and inside it we'll add the skeleton +structure for our plugin:

// plugin.js

'use strict'

module.exports = async function plugin (app) {
app.log.info('plugin loaded')
}

Now let's register our plugin in our API configuration file, platformatic.db.json:

{
...
"migrations": {
"dir": "./migrations"
},
"plugins": {
"paths": ["./plugin.js"]
}
}

And then we'll start up our Platformatic API:

npm run dev

We should see log messages that tell us that our new migration has been +applied and our plugin has been loaded:

[10:09:20.052] INFO (146270): running 003.do.sql
[10:09:20.129] INFO (146270): plugin loaded
[10:09:20.209] INFO (146270): server listening
url: "http://127.0.0.1:3042"

Now it's time to start adding some custom functionality inside our plugin.

Add a REST API route

We're going to add a REST route to our API that increments the count of +likes for a specific quote: /quotes/:id/like

First let's add fluent-json-schema as a dependency for our API:

npm install fluent-json-schema

We'll use fluent-json-schema to help us generate a JSON Schema. We can then +use this schema to validate the request path parameters for our route (id).

tip

You can use fastify-type-provider-typebox or typebox if you want to convert your JSON Schema into a Typescript type. See this GitHub thread to have a better overview about it. Look at the example below to have a better overview.

Here you can see in practice how to leverage typebox combined with fastify-type-provider-typebox:

import { FastifyInstance } from "fastify";
import { Static, Type } from "@sinclair/typebox";
import { TypeBoxTypeProvider } from "@fastify/type-provider-typebox";

/**
* Creation of the JSON schema needed to validate the params passed to the route
*/
const schemaParams = Type.Object({
num1: Type.Number(),
num2: Type.Number(),
});

/**
* We convert the JSON schema to the TypeScript type, in this case:
* {
num1: number;
num2: number;
}
*/
type Params = Static<typeof schemaParams>;

/**
* Here we can pass the type previously created to our synchronous unit function
*/
const multiplication = ({ num1, num2 }: Params) => num1 * num2;

export default async function (app: FastifyInstance) {
app.withTypeProvider<TypeBoxTypeProvider>().get(
"/multiplication/:num1/:num2",
{ schema: { params: schemaParams } },
/**
* Since we leverage `withTypeProvider<TypeBoxTypeProvider>()`,
* we no longer need to explicitly define the `params`.
* They will be automatically inferred as:
* {
num1: number;
num2: number;
}
*/
({ params }) => multiplication(params)
);
}

Now let's add our REST API route in plugin.js:

'use strict'

const S = require('fluent-json-schema')

module.exports = async function plugin (app) {
app.log.info('plugin loaded')

// This JSON Schema will validate the request path parameters.
// It reuses part of the schema that Platformatic DB has
// automatically generated for our Quote entity.
const schema = {
params: S.object().prop('id', app.getSchema('Quote').properties.id)
}

app.post('/quotes/:id/like', { schema }, async function (request, response) {
return {}
})
}

We can now make a POST request to our new API route:

curl --request POST http://localhost:3042/quotes/1/like
info

Learn more about how validation works in the +Fastify validation documentation.

Our API route is currently returning an empty object ({}). Let's wire things +up so that it increments the number of likes for the quote with the specified ID. +To do this we'll add a new function inside of our plugin:

module.exports = async function plugin (app) {
app.log.info('plugin loaded')

async function incrementQuoteLikes (id) {
const { db, sql } = app.platformatic

const result = await db.query(sql`
UPDATE quotes SET likes = likes + 1 WHERE id=${id} RETURNING likes
`)

return result[0]?.likes
}

// ...
}

And then we'll call that function in our route handler function:

app.post('/quotes/:id/like', { schema }, async function (request, response) {
return { likes: await incrementQuoteLikes(request.params.id) }
})

Now when we make a POST request to our API route:

curl --request POST http://localhost:3042/quotes/1/like

We should see that the likes value for the quote is incremented every time +we make a request to the route.

{"likes":1}

Add a GraphQL API mutation

We can add a likeQuote mutation to our GraphQL API by reusing the +incrementQuoteLikes function that we just created.

Let's add this code at the end of our plugin, inside plugin.js:

module.exports = async function plugin (app) {
// ...

app.graphql.extendSchema(`
extend type Mutation {
likeQuote(id: ID!): Int
}
`)

app.graphql.defineResolvers({
Mutation: {
likeQuote: async (_, { id }) => await incrementQuoteLikes(id)
}
})
}

The code we've just added extends our API's GraphQL schema and defines +a corresponding resolver for the likeQuote mutation.

We can now load up GraphiQL in our web browser and try out our new likeQuote +mutation with this GraphQL query:

mutation {
likeQuote(id: 1)
}
info

Learn more about how to extend the GraphQL schema and define resolvers in the +Mercurius API documentation.

Enable CORS on the API

When we build "like" functionality into our frontend, we'll be making a client +side HTTP request to our GraphQL API. Our backend API and our frontend are running +on different origins, so we need to configure our API to allow requests from +the frontend. This is known as Cross-Origin Resource Sharing (CORS).

To enable CORS on our API, let's open up our API's .env file and add in +a new setting:

PLT_SERVER_CORS_ORIGIN=http://localhost:3000

The value of PLT_SERVER_CORS_ORIGIN is our frontend application's origin.

Now we can add a cors configuration object in our API's configuration file, +platformatic.db.json:

{
"server": {
"logger": {
"level": "{PLT_SERVER_LOGGER_LEVEL}"
},
"hostname": "{PLT_SERVER_HOSTNAME}",
"port": "{PORT}",
"cors": {
"origin": "{PLT_SERVER_CORS_ORIGIN}"
}
},
...
}

The HTTP responses from all endpoints on our API will now include the header:

access-control-allow-origin: http://localhost:3000

This will allow JavaScript running on web pages under the http://localhost:3000 +origin to make requests to our API.

Add like quote functionality

Now that our API supports "liking" a quote, let's integrate it as a feature in +our frontend.

First we'll create a new component, src/components/QuoteActionLike.astro:

---
export interface Props {
id: number;
likes: number;
}

const { id, likes } = Astro.props;
---
<span data-quote-id={id} class="like-quote cursor-pointer mr-5 flex items-center">
<svg class="like-icon w-6 h-6 mr-2 text-red-600" xmlns="http://www.w3.org/2000/svg" fill="none" viewBox="0 0 24 24" stroke-width="1.5" stroke="currentColor" class="w-6 h-6">
<path stroke-linecap="round" stroke-linejoin="round" d="M21 8.25c0-2.485-2.099-4.5-4.688-4.5-1.935 0-3.597 1.126-4.312 2.733-.715-1.607-2.377-2.733-4.313-2.733C5.1 3.75 3 5.765 3 8.25c0 7.22 9 12 9 12s9-4.78 9-12z" />
</svg>
<span class="likes-count w-8">{likes}</span>
</span>

<style>
.like-quote:hover .like-icon,
.like-quote.liked .like-icon {
fill: currentColor;
}
</style>

And in our listing page, src/pages/index.astro, let's import our new +component and add it into the interface:

---
import Layout from '../layouts/Layout.astro';
import QuoteActionEdit from '../components/QuoteActionEdit.astro';
import QuoteActionDelete from '../components/QuoteActionDelete.astro';
import QuoteActionLike from '../components/QuoteActionLike.astro';
import { quotesApi, gql } from '../lib/quotes-api';

// ...
---

<Layout title="All quotes" page="listing">
<main>
{quotes.length > 0 ? quotes.map((quote) => (
<div class="border-b mb-6">
...
<div class="flex flex-col mb-6 text-gray-400">
<span class="flex items-center">
<QuoteActionLike id={quote.id} likes={quote.likes} />
<QuoteActionEdit id={quote.id} />
<QuoteActionDelete id={quote.id} />
</span>
<span class="mt-4 text-gray-400 italic">Added {new Date(Number(quote.createdAt)).toUTCString()}</span>
</div>
</div>
...

Then let's update the GraphQL query in this component's script to retrieve the +likes field for all quotes:

const { data } = await quotesApi.query(gql`
query {
quotes {
id
quote
saidBy
likes
createdAt
movie {
id
name
}
}
}
`);

Now we have the likes showing for each quote, let's wire things up so that +clicking on the like component for a quote will call our API and add a like.

Let's open up src/scripts/quote-actions.js and add a new function that +makes a request to our GraphQL API:

import { quotesApi, gql } from '../lib/quotes-api.js'

export function confirmDeleteQuote (form) {
if (confirm('Are you sure want to delete this quote?')) {
form.submit()
}
}

export async function likeQuote (likeQuote) {
likeQuote.classList.add('liked')
likeQuote.classList.remove('cursor-pointer')

const id = Number(likeQuote.dataset.quoteId)

const { data } = await quotesApi.mutation(gql`
mutation($id: ID!) {
likeQuote(id: $id)
}
`, { id })

if (data?.likeQuote) {
likeQuote.querySelector('.likes-count').innerText = data.likeQuote
}
}

And then let's attach the likeQuote function to the click event for each +like quote component on our listing page. We can do this by adding a little +extra code inside the <script> block in src/pages/index.astro:

<script>
import { confirmDeleteQuote, likeQuote } from '../scripts/quote-actions.js'

addEventListener('DOMContentLoaded', () => {
document.querySelectorAll('.form-delete-quote').forEach((deleteForm) => {
deleteForm.addEventListener('submit', (event) => {
event.preventDefault()
confirmDeleteQuote(event.currentTarget)
})
})

document.querySelectorAll('.like-quote').forEach((container) => {
container.addEventListener('click', (event) => likeQuote(event.currentTarget), { once: true })
})
})
</script>

Sort the listing by top quotes

Now that users can like their favourite quotes, as a final step, we'll allow +for sorting quotes on the listing page by the number of likes they have.

Let's update src/pages/index.astro to read a sort query string parameter +and use it in the GraphQL query that we make to our API:

---
// ...

const allowedSortFields = ["createdAt", "likes"];
const searchParamSort = new URL(Astro.request.url).searchParams.get("sort");
const sort = allowedSortFields.includes(searchParamSort) ? searchParamSort : "createdAt";

const { data } = await quotesApi.query(gql`
query {
quotes(orderBy: {field: ${sort}, direction: DESC}) {
id
quote
saidBy
likes
createdAt
movie {
id
name
}
}
}
`);

const quotes = data?.quotes || [];
---
<Layout title="All quotes" page={`listing-${sort}`}>
...

Then let's replace the 'All quotes' link in the <nav> in src/layouts/Layout.astro +with two new links:

<nav class="prose mx-auto mb-6 border-y border-gray-200 flex">
<a href="/?sort=createdAt" class={`p-3 ${page === "listing-createdAt" && navActiveClasses}`}>Latest quotes</a>
<a href="/?sort=likes" class={`p-3 ${page === "listing-likes" && navActiveClasses}`}>Top quotes</a>
<a href="/add" class={`p-3 ${page === "add" && navActiveClasses}`}>Add a quote</a>
</nav>

With these few extra lines of code, our users can now sort quotes by when they +were created or by the number of likes that they have. Neat!

Wrapping up

And we're done — you now have the knowledge you need to build a full stack +application on top of Platformatic DB.

We can't wait to see what you'll build next!

+ + + + \ No newline at end of file diff --git a/docs/getting-started/new-api-project-instructions/index.html b/docs/getting-started/new-api-project-instructions/index.html new file mode 100644 index 00000000000..b10870946b2 --- /dev/null +++ b/docs/getting-started/new-api-project-instructions/index.html @@ -0,0 +1,20 @@ + + + + + +new-api-project-instructions | Platformatic Open Source Software + + + + + +
+
Version: 0.42.1

new-api-project-instructions

Run this command in your terminal to start the Platformatic creator wizard:

npm create platformatic@latest

This interactive command-line tool will ask you some questions about how you'd +like to set up your new Platformatic project. For this guide, select these options:

- Which kind of project do you want to create?  => DB
- Where would you like to create your project? => quick-start
- Do you want to create default migrations? => Yes
- Do you want to create a plugin? => Yes
- Do you want to use TypeScript? => No
- Do you want to install dependencies? => Yes (this can take a while)
- Do you want to apply the migrations? => Yes
- Do you want to generate types? => Yes
- Do you want to create the github action to deploy this application to Platformatic Cloud dynamic workspace? => No
- Do you want to create the github action to deploy this application to Platformatic Cloud static workspace? => No

Once the wizard is complete, you'll have a Platformatic app project in the +folder quick-start, with example migration files and a plugin script.

info

Make sure you run the npm/yarn/pnpm install command manually if you +don't ask the wizard to do it for you.

+ + + + \ No newline at end of file diff --git a/docs/getting-started/quick-start-guide/index.html b/docs/getting-started/quick-start-guide/index.html new file mode 100644 index 00000000000..14f68b1b8c6 --- /dev/null +++ b/docs/getting-started/quick-start-guide/index.html @@ -0,0 +1,38 @@ + + + + + +Quick Start Guide | Platformatic Open Source Software + + + + + +
+
Version: 0.42.1

Quick Start Guide

In this guide you'll learn how to create and run your first API with +Platformatic DB. Let's get started!

info

This guide uses SQLite for the database, but +Platformatic DB also supports PostgreSQL, +MySQL and MariaDB databases.

Prerequisites

Platformatic supports macOS, Linux and Windows (WSL recommended).

To follow along with this guide you'll need to have these things installed:

Create a new API project

Automatic CLI

Run this command in your terminal to start the Platformatic creator wizard:

npm create platformatic@latest

This interactive command-line tool will ask you some questions about how you'd +like to set up your new Platformatic project. For this guide, select these options:

- Which kind of project do you want to create?  => DB
- Where would you like to create your project? => quick-start
- Do you want to create default migrations? => Yes
- Do you want to create a plugin? => Yes
- Do you want to use TypeScript? => No
- Do you want to install dependencies? => Yes (this can take a while)
- Do you want to apply the migrations? => Yes
- Do you want to generate types? => Yes
- Do you want to create the github action to deploy this application to Platformatic Cloud dynamic workspace? => No
- Do you want to create the github action to deploy this application to Platformatic Cloud static workspace? => No

Once the wizard is complete, you'll have a Platformatic app project in the +folder quick-start, with example migration files and a plugin script.

info

Make sure you run the npm/yarn/pnpm install command manually if you +don't ask the wizard to do it for you.

Start your API server

In your project directory, run this command to start your API server:

npm start

Your Platformatic API is now up and running! 🌟

This command will:

  • Automatically map your SQL database to REST and GraphQL API interfaces.
  • Start the Platformatic API server.

You can jump down to Next steps or read on to learn more about +the project files that the wizard has created for you.

Check the database schema

In your project directory (quick-start), open the migrations directory, which stores your database migration files. It contains both the 001.do.sql and 001.undo.sql files. The 001.do.sql file contains the SQL statements to create the database objects, while the 001.undo.sql file contains the SQL statements to drop them.

migrations/001.do.sql
CREATE TABLE IF NOT EXISTS movies (
id INTEGER PRIMARY KEY,
title TEXT NOT NULL
);

Note that this migration has been already applied by Platformatic creator.

Check your API configuration

In your project directory, check the Platformatic configuration file named +platformatic.db.json and the environment file named .env:

The created configuration tells Platformatic to:

  • Run an API server on http://127.0.0.1:3042/
  • Connect to an SQLite database stored in a file named db.sqlite
  • Look for database migration files in the migrations directory
  • Load the plugin file named plugin.js and automatically generate types
tip

The Configuration reference explains all of the +supported configuration options.

Manual setup

Create a directory for your new API project:

mkdir quick-start

cd quick-start

Then create a package.json file and install the platformatic +CLI as a project dependency:

npm init --yes

npm install platformatic

Add a database schema

In your project directory (quick-start), create a file for your sqlite3 database and also, a migrations directory to +store your database migration files:

touch db.sqlite

mkdir migrations

Then create a new migration file named 001.do.sql in the migrations +directory.

Copy and paste this SQL query into the migration file:

migrations/001.do.sql
CREATE TABLE movies (
id INTEGER PRIMARY KEY,
title VARCHAR(255) NOT NULL
);

When it's run by Platformatic, this query will create a new database table +named movies.

tip

You can check syntax for SQL queries on the Database.Guide SQL Reference.

Configure your API

In your project directory, create a new Platformatic configuration file named +platformatic.db.json.

Copy and paste in this configuration:

platformatic.db.json
{
"server": {
"hostname": "127.0.0.1",
"port": "3042"
},
"db": {
"connectionString": "sqlite://./db.sqlite"
},
"migrations": {
"dir": "./migrations",
"autoApply": "true"
}
}

This configuration tells Platformatic to:

  • Run an API server on http://127.0.0.1:3042/
  • Connect to an SQLite database stored in a file named db.sqlite
  • Look for, and apply the database migrations specified in the migrations directory
tip

The Configuration reference explains all of the +supported configuration options.

Start your API server

In your project directory, use the Platformatic CLI to start your API server:

npx platformatic db start

This will:

  • Automatically map your SQL database to REST and GraphQL API interfaces.
  • Start the Platformatic API server.

Your Platformatic API is now up and running! 🌟

Next steps

Use the REST API interface

You can use cURL to make requests to the REST interface of your API, for example:

Create a new movie

curl -X POST -H "Content-Type: application/json" \
-d "{ \"title\": \"Hello Platformatic DB\" }" \
http://localhost:3042/movies

You should receive a response from your API like this:

{"id":1,"title":"Hello Platformatic DB"}

Get all movies

curl http://localhost:3042/movies

You should receive a response from your API like this, with an array +containing all the movies in your database:

[{"id":1,"title":"Hello Platformatic DB"}]
tip

If you would like to know more about what routes are automatically available, +take a look at the REST API reference +for an overview of the REST interface that the generated API provides.

Swagger OpenAPI documentation

You can explore the OpenAPI documentation for your REST API in the Swagger UI at +http://localhost:3042/documentation

Use the GraphQL API interface

Open http://localhost:3042/graphiql in your +web browser to explore the GraphQL interface of your API.

Try out this GraphQL query to retrieve all movies from your API:

query {
movies {
id
title
}
}
tip

Learn more about your API's GraphQL interface in the +GraphQL API reference.

+ + + + \ No newline at end of file diff --git a/docs/guides/add-custom-functionality/extend-graphql/index.html b/docs/guides/add-custom-functionality/extend-graphql/index.html new file mode 100644 index 00000000000..17e377ea3c8 --- /dev/null +++ b/docs/guides/add-custom-functionality/extend-graphql/index.html @@ -0,0 +1,18 @@ + + + + + +Extend GraphQL Schema | Platformatic Open Source Software + + + + + +
+
Version: 0.42.1

Extend GraphQL Schema

Sum Function

Copy and paste this code into ./sample-plugin.js file

'use strict'
module.exports = async(app, opts) => {
app.graphql.extendSchema(`
extend type Query {
add(x: Int, y: Int): Int
}
`)
app.graphql.defineResolvers({
Query: {
add: async (_, { x, y }) => x + y
}
})
}

This will add a new GraphQL query called add which will simply add the two inputs x and y provided.

You don't need to reload the server, since it will watch this file and hot-reload itself. +Let's query the server with the following body


query{
add(x: 1, y: 2)
}

You can use curl command to run this query

$ curl --location --request POST 'http://localhost:3042/graphql' \
--header 'Content-Type: application/json' \
--data-raw '{"query":"query{\n add(x: 1, y: 2)\n}"}'

You will get this output, with the sum.

{
"data": {
"add": 3
}
}

Extend Entities API

Let's implement a getPageByTitle query

'use strict'
module.exports = async(app, opts) => {
app.graphql.extendSchema(`
extend type Query {
getPageByTitle(title: String): Page
}
`)
app.graphql.defineResolvers({
Query: {
getPageByTitle: async(_, { title }) => {
const res = await app.platformatic.entities.page.find({
where: {
title: {
eq: title
}
}
})
if (res) {
return res[0]
}
return null
}
}
})
}

Page GraphQL type is already defined by Platformatic DB on start.

We are going to run this code against this GraphQL query

query{
getPageByTitle(title: "First Page"){
id
title
}
}

You can use curl command to run this query

$ curl --location --request POST 'http://localhost:3042/graphql' \
--header 'Content-Type: application/json' \
--data-raw '{"query":"query{\n getPageByTitle(title: \"First Page\"){\n id\n title\n }\n}"}'

You will get an output similar to this

{
"data": {
"getPageByTitle": {
"id": "1",
"title": "First Page"
}
}
}
+ + + + \ No newline at end of file diff --git a/docs/guides/add-custom-functionality/extend-rest/index.html b/docs/guides/add-custom-functionality/extend-rest/index.html new file mode 100644 index 00000000000..4a44fc31d9d --- /dev/null +++ b/docs/guides/add-custom-functionality/extend-rest/index.html @@ -0,0 +1,17 @@ + + + + + +Extend REST API | Platformatic Open Source Software + + + + + +
+
Version: 0.42.1

Extend REST API

We will follow same examples implemented in GraphQL examples: a sum function and an API to get pages by title.

Sum Function

Copy and paste this code into ./sample-plugin.js file

'use strict'
module.exports = async(app, opts) => {
app.post('/sum', async(req, reply) => {
const { x, y } = req.body
return { sum: (x + y)}
})
}

You don't need to reload the server, since it will watch this file and hot-reload itself.

Let's make a POST /sum request to the server with the following body

{
"x": 1,
"y": 2
}

You can use curl command to run this query

$ curl --location --request POST 'http://localhost:3042/sum' \
--header 'Content-Type: application/json' \
--data-raw '{
"x": 1,
"y": 2
}'

You will get this output, with the sum.

{
"sum": 3
}

Extend Entities API

Let's implement a /page-by-title endpoint, using Entities API

'use strict'
module.exports = async(app, opts) => {
app.get('/page-by-title', async(req, reply) => {
const { title } = req.query
const res = await app.platformatic.entities.page.find({
where: {
title: {
eq: title
}
}
})
if (res) {
return res[0]
}
return null
})
}

We will make a GET /page-by-title?title=First%20Page request, and we expect a single page as output.

You can use curl command to run this query

$ curl --location --request GET 'http://localhost:3042/page-by-title?title=First Page'

You will get an output similar to this

{
"id": "1",
"title": "First Page",
"body": "This is the first sample page"
}
+ + + + \ No newline at end of file diff --git a/docs/guides/add-custom-functionality/introduction/index.html b/docs/guides/add-custom-functionality/introduction/index.html new file mode 100644 index 00000000000..aae5254823b --- /dev/null +++ b/docs/guides/add-custom-functionality/introduction/index.html @@ -0,0 +1,17 @@ + + + + + +Add Custom Functionality | Platformatic Open Source Software + + + + + +
+
Version: 0.42.1

Add Custom Functionality

If you want to extend Platformatic DB features, it is possible to register a plugin, which will be in the form of a standard Fastify plugin.

The config file will specify where the plugin file is located as the example below:

{
...
"plugins": {
"paths": ["./plugin/index.js"]
}
}

The path is relative to the config file path.

Since it uses fastify-isolate under the hood, all other options of that package may be specified under the plugin property.

Once the config file is set up, you can write your plugin

module.exports = async function (app) {
app.log.info('plugin loaded')
// Extend GraphQL Schema with resolvers
app.graphql.extendSchema(`
extend type Query {
add(x: Int, y: Int): Int
}
`)
app.graphql.defineResolvers({
Query: {
add: async (_, { x, y }) => x + y
}
})

// Create a new route, see https://www.fastify.io/docs/latest/Reference/Routes/ for more info
app.post('/sum', (req, reply) => {
const {x, y} = req.body
return { result: x + y }
})

// access platformatic entities data
app.get('/all-entities', (req, reply) => {
const entities = Object.keys(app.platformatic.entities)
return { entities }
})
}

+ + + + \ No newline at end of file diff --git a/docs/guides/add-custom-functionality/prerequisites/index.html b/docs/guides/add-custom-functionality/prerequisites/index.html new file mode 100644 index 00000000000..4384ff6b5e8 --- /dev/null +++ b/docs/guides/add-custom-functionality/prerequisites/index.html @@ -0,0 +1,17 @@ + + + + + +Prerequisites | Platformatic Open Source Software + + + + + +
+
Version: 0.42.1

Prerequisites

In the following examples we assume you already

  • cloned platformatic/platformatic repo from Github
  • ran pnpm install to install all dependencies
  • have Docker and docker-compose installed and running on your machine

Config File

Create a platformatic.db.json file in the project root; it will be loaded automatically by Platformatic (no need for the -c, --config flag).

{
"server": {
"hostname": "127.0.0.1",
"port": 3042,
"logger": {
"level": "info"
}
},
"db": {
"connectionString": "postgres://postgres:postgres@127.0.0.1/postgres"
},
"migrations": {
"dir": "./migrations",
"table": "versions"
},
"plugins": {
"paths": ["plugin.js"]
}
}
  • Once Platformatic DB starts, its API will be available at http://127.0.0.1:3042
  • It will connect and read the schema from a PostgreSQL DB
  • Will read migrations from ./migrations directory
  • Will load custom functionality from the ./plugin.js file.

Database and Migrations

Start the database using the sample docker-compose.yml file.

$ docker-compose up -d postgresql

For migrations create a ./migrations directory and a 001.do.sql file with following contents

CREATE TABLE pages (
id SERIAL PRIMARY KEY,
title VARCHAR(255) NOT NULL,
body TEXT NOT NULL
);
INSERT INTO pages (title, body) VALUES ('First Page', 'This is the first sample page');
INSERT INTO pages (title, body) VALUES ('Second Page', 'This is the second sample page');
INSERT INTO pages (title, body) VALUES ('Third Page', 'This is the third sample page');

Plugin

Copy and paste this boilerplate code into ./plugin.js file. We will fill this in the examples.

'use strict'

module.exports = async (app, opts) => {
// we will fill this later
}

Start the server

Run

$ platformatic db start

You will get an output similar to this

                           /////////////
///// /////
/// ///
/// ///
/// ///
&& /// /// &&
&&&&&& /// /// &&&&&&
&&&& /// /// &&&&
&&& /// /// &&&&&&&&&&&&
&&& /// /////// //// && &&&&&
&& /// /////////////// &&&
&&& /// /// &&&
&&& /// // &&
&&& /// &&
&&& /// &&&
&&&& /// &&&
&&&&&% /// &&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&
///
///
///
///
///
///

[11:19:46.562] INFO (65122): running 001.do.sql
[11:19:46.929] INFO (65122): server listening
url: "http://127.0.0.1:3042"

Now it is possible to create some examples, like extending the GraphQL Schema or extending the REST API

+ + + + \ No newline at end of file diff --git a/docs/guides/add-custom-functionality/raw-sql/index.html b/docs/guides/add-custom-functionality/raw-sql/index.html new file mode 100644 index 00000000000..c7bb58bd86c --- /dev/null +++ b/docs/guides/add-custom-functionality/raw-sql/index.html @@ -0,0 +1,17 @@ + + + + + +Raw SQL queries | Platformatic Open Source Software + + + + + +
+
Version: 0.42.1

Raw SQL queries

To run raw SQL queries using plugins, use the app.platformatic.db.query method and pass it a SQL query created with the app.platformatic.sql method.

'use strict'
module.exports = async(app, opts) => {
app.graphql.extendSchema(`
type YearlySales {
year: Int
sales: Int
}

extend type Query {
yearlySales: [YearlySales]
}
`)
app.graphql.defineResolvers({
Query: {
yearlySales: async(_, { title }) => {
const { db, sql } = app.platformatic;
const res = await db.query(sql(`
SELECT
YEAR(created_at) AS year,
SUM(amount) AS sales
FROM
orders
GROUP BY
YEAR(created_at)
`))
return res
}
}
})
}
+ + + + \ No newline at end of file diff --git a/docs/guides/compiling-typescript-for-deployment/index.html b/docs/guides/compiling-typescript-for-deployment/index.html new file mode 100644 index 00000000000..19b426a73fb --- /dev/null +++ b/docs/guides/compiling-typescript-for-deployment/index.html @@ -0,0 +1,25 @@ + + + + + +Compiling Typescript for Deployment | Platformatic Open Source Software + + + + + +
+
Version: 0.42.1

Compiling Typescript for Deployment

Platformatic Service provides automatic TypeScript compilation during the startup +of your Node.js server. While this provides an amazing developer experience, in production it adds additional +start time and it requires more resources. In this guide, we show how to compile your TypeScript +source files before shipping to a server.

Setup

The following is supported by all Platformatic applications, as they are all based on the same plugin system. +If you have generated your application using npx create-platformatic@latest, you will have a similar section in your config file:

{
...
"plugins": {
"paths": [{
"path": "plugins",
"encapsulate": false
}, "routes"],
"typescript": "{PLT_TYPESCRIPT}"
}
}

Note that the {PLT_TYPESCRIPT} will be automatically replaced with the PLT_TYPESCRIPT environment variable, that is configured in your +.env (and .env.sample) file:

PLT_TYPESCRIPT=true

Older Platformatic applications might not have the same layout, if so you can update your settings to match (after updating your dependencies).

Compiling for deployment

Compiling for deployment is then as easy as running plt service compile in that same folder. +Remember to set PLT_TYPESCRIPT=false in your environment variables in the deployed environments.

Usage with Runtime

If you are building a Runtime-based application, you will need +to compile every service independently or use the plt runtime compile command.

Avoid shipping TypeScript sources

If you want to avoid shipping the TypeScript sources you need to configure Platformatic with the location +where your files have been built by adding an outDir option:

{
...
"plugins": {
"paths": [{
"path": "plugins",
"encapsulate": false
}, "routes"],
"typescript": {
"enabled": "{PLT_TYPESCRIPT}",
"outDir": "dist"
}
}
}

This is not necessary if you include tsconfig.json together with the compiled code.

+ + + + \ No newline at end of file diff --git a/docs/guides/debug-platformatic-db/index.html b/docs/guides/debug-platformatic-db/index.html new file mode 100644 index 00000000000..e05b3d34bfa --- /dev/null +++ b/docs/guides/debug-platformatic-db/index.html @@ -0,0 +1,17 @@ + + + + + +Debug Platformatic DB | Platformatic Open Source Software + + + + + +
+
Version: 0.42.1

Debug Platformatic DB

Error: No tables found in the database

  • Verify your database connection string is correct in your Platformatic DB configuration
    • Make sure the database name is correct
  • Ensure that you have run the migration command npx platformatic db migrations apply before starting the server. See the Platformatic DB Migrations documentation for more information on working with migrations.

Logging SQL queries

You can see all the queries that are being run against your database in your terminal by setting the logger level to trace in your platformatic.db.json config file:

platformatic.db.json
{
"server": {
"logger": {
"level": "trace"
}
}
}
+ + + + \ No newline at end of file diff --git a/docs/guides/deploying-on-lambda/index.html b/docs/guides/deploying-on-lambda/index.html new file mode 100644 index 00000000000..5fa121b1435 --- /dev/null +++ b/docs/guides/deploying-on-lambda/index.html @@ -0,0 +1,26 @@ + + + + + +Deploying on AWS Lambda | Platformatic Open Source Software + + + + + +
+
Version: 0.42.1

Deploying on AWS Lambda

It is possible to deploy Platformatic applications to AWS Lambda +by leveraging @fastify/aws-lambda.

Once you set up your Platformatic DB application, such as following +our tutorial, you can create a +server.mjs file as follows:

import awsLambdaFastify from '@fastify/aws-lambda'
import { buildServer } from '@platformatic/db'

const app = await buildServer('./platformatic.db.json')
// You can use the same approach with both Platformatic DB and
// and service
// const app = await buildServer('./platformatic.service.json')

// The following also work for Platformatic Service applications
// import { buildServer } from '@platformatic/service'
export const handler = awsLambdaFastify(app)

// Loads the Application, must be after the call to `awsLambdaFastify`
await app.ready()

This would be the entry point for your AWS Lambda function.

Avoiding cold start

Caching the DB schema

If you use Platformatic DB, you want to turn on the schemalock +configuration to cache the schema +information on disk.

Set the db.schemalock configuration to true, start the application, +and a schema.lock file should appear. Make sure to commit that file and +deploy your lambda.

Provisioned concurrency

Since AWS Lambda now enables the use of ECMAScript (ES) modules in Node.js 14 runtimes, +you could lower the cold start latency when used with Provisioned Concurrency +thanks to the top-level await functionality. (Excerpt taken from @fastify/aws-lambda)

+ + + + \ No newline at end of file diff --git a/docs/guides/deployment/advanced-fly-io-deployment/index.html b/docs/guides/deployment/advanced-fly-io-deployment/index.html new file mode 100644 index 00000000000..21a163a6642 --- /dev/null +++ b/docs/guides/deployment/advanced-fly-io-deployment/index.html @@ -0,0 +1,22 @@ + + + + + +Advanced Fly.io Deployment | Platformatic Open Source Software + + + + + +
+
Version: 0.42.1

Advanced Fly.io Deployment

Techniques used in this guide are based on the Deploy to Fly.io with SQLite +deployment guide.

Adding sqlite for debugging

With a combination of Docker and Fly.io, you can create an easy way to debug +your sqlite application without stopping your application or exporting the data. +At the end of this guide, you will be able to run fly ssh console -C db-cli to +be dropped into your remote database.

Start by creating a script for launching the database, calling it db-cli.sh:

#!/bin/sh
set -x
# DSN will be defined in the Dockerfile
sqlite3 $DSN

Create a new Dockerfile which will act as the build and deployment image:

FROM node:18-alpine

# Setup sqlite viewer
RUN apk add sqlite
ENV DSN "/app/.platformatic/data/app.db"
COPY db-cli.sh /usr/local/bin/db-cli
RUN chmod +x /usr/local/bin/db-cli

WORKDIR /app
COPY package.json package.json
COPY package-lock.json package-lock.json

RUN npm ci --omit=dev

COPY platformatic.db.json platformatic.db.json

COPY migrations migrations
# Uncomment if your application is running a plugin
# COPY plugin.js plugin.js

EXPOSE 8080

CMD ["npm", "start"]

Add a start script to your package.json:

{
"scripts": {
"start": "platformatic db"
}
}

With Fly, it becomes straightforward to connect directly to the database by +running the following command from your local machine:

fly ssh console -C db-cli
+ + + + \ No newline at end of file diff --git a/docs/guides/deployment/deploy-to-fly-io-with-sqlite/index.html b/docs/guides/deployment/deploy-to-fly-io-with-sqlite/index.html new file mode 100644 index 00000000000..e4596ceba48 --- /dev/null +++ b/docs/guides/deployment/deploy-to-fly-io-with-sqlite/index.html @@ -0,0 +1,33 @@ + + + + + +Deploy to Fly.io with SQLite | Platformatic Open Source Software + + + + + +
+
Version: 0.42.1

Deploy to Fly.io with SQLite

note

To follow this how-to guide, you'll first need to install the Fly CLI and create +an account by following this official guide. +You will also need an existing Platformatic DB project, please check out our +getting started guide if needed.

Navigate to your Platformatic DB project in the terminal on your local machine. +Run fly launch and follow the prompts. When it asks if you want to deploy +now, say "no" as there are a few things that you'll need to configure first.

You can also create the fly application with one line. This will create your +application in London (lhr):

fly launch --no-deploy --generate-name --region lhr --org personal --path .

The fly CLI should have created a fly.toml file in your project +directory.

Explicit builder

The fly.toml file may be missing an explicit builder setting. To have +consistent builds, it is best to add a build section:

[build]
builder = "heroku/buildpacks:20"

Database storage

Create a volume for database storage, naming it data:

fly volumes create data

This will create storage in the same region as the application. The volume +defaults to 3GB size, use -s to change the size. For example, -s 10 is 10GB.

Add a mounts section in fly.toml:

[mounts]
source = "data"
destination = "/app/.platformatic/data"

Create a directory in your project where your SQLite database will be created:

mkdir -p .platformatic/data

touch .platformatic/data/.gitkeep

The .gitkeep file ensures that this directory will always be created when +your application is deployed.

You should also ensure that your SQLite database is ignored by Git. This helps +avoid inconsistencies when your application is deployed:

echo "*.db" >> .gitignore

The command above assumes that your SQLite database file ends with the extension +.db — if the extension is different then you must change the command to match.

Change the connection string to an environment variable and make sure that +migrations are autoApplying (for platformatic@^0.4.0) in platformatic.db.json:

{
"db": {
"connectionString": "{DATABASE_URL}"
},
"migrations": {
"dir": "./migrations",
"autoApply": true
}
}

Configure server

Make sure that your platformatic.db.json uses environment variables +for the server section:

{
"server": {
"logger": {
"level": "{PLT_SERVER_LOGGER_LEVEL}"
},
"hostname": "{PLT_SERVER_HOSTNAME}",
"port": "{PORT}"
}
}

Configure environment

Start with your local environment, create a .env file and put the following:

PORT=3042
PLT_SERVER_HOSTNAME=127.0.0.1
PLT_SERVER_LOGGER_LEVEL=debug
DATABASE_URL=sqlite://.platformatic/data/movie-quotes.db

Avoid accidental leaks by ignoring your .env file:

echo ".env" >> .gitignore

This same configuration needs to be added to fly.toml:

[env]
PORT = 8080
PLT_SERVER_HOSTNAME = "0.0.0.0"
PLT_SERVER_LOGGER_LEVEL = "info"
DATABASE_URL = "sqlite:///app/.platformatic/data/movie-quotes.db"

Deploy application

A valid package.json will be needed so if you do not have one, generate one +by running npm init.

In your package.json, make sure there is a start script to run your +application:

{
"scripts": {
"start": "platformatic db"
}
}

Before deploying, make sure a .dockerignore file is created:

cp .gitignore .dockerignore

Finally, deploy the application to Fly by running:

fly deploy
+ + + + \ No newline at end of file diff --git a/docs/guides/deployment/index.html b/docs/guides/deployment/index.html new file mode 100644 index 00000000000..981a54511e1 --- /dev/null +++ b/docs/guides/deployment/index.html @@ -0,0 +1,46 @@ + + + + + +Deployment | Platformatic Open Source Software + + + + + +
+
Version: 0.42.1

Deployment

Applications built with Platformatic DB can be deployed to a hosting service +in the same way as any other Node.js application. This guide covers a few +things that will help smooth the path from development to production.

Running a Platformatic DB application

Make the Platformatic CLI available

To run a Platformatic DB application, the Platformatic CLI must be available +in the production environment. The most straightforward way of achieving this +is to install it as a project dependency. +This means that when npm install (or npm ci) is run as part of your +build/deployment process, the Platformatic CLI will be installed.

Define an npm run script

A number of hosting services will automatically detect if your project's +package.json has a start npm run script. They will then execute the command +npm start to run your application in production.

You can add platformatic db start as the command for your project's start +npm run script, for example:

{
...
"scripts": {
"start": "platformatic db start",
},
}

Server configuration

info

See the Configuration reference for all +configuration settings.

Configuration with environment variables

We recommend that you use environment variable placeholders +in your Platformatic DB configuration. This will allow you to configure +different settings in your development and production environments.

In development you can set the environment variables via a .env file +that will be automatically loaded by Platformatic DB. For example:

PORT=3042
PLT_SERVER_HOSTNAME=127.0.0.1

In production your hosting provider will typically provide their own mechanism +for setting environment variables.

Configure the server port

Configure the port that the server will listen on by setting an environment +variable placeholder in your Platformatic DB configuration file:

platformatic.db.json
{
"server": {
...
"port": "{PORT}"
},
...
}

Listen on all network interfaces

Most hosting providers require that you configure your server to bind to all +available network interfaces. To do this you must set the server hostname to +0.0.0.0.

This can be handled with an environment variable placeholder in your Platformatic +DB configuration file:

platformatic.db.json
{
"server": {
...
"hostname": "{PLT_SERVER_HOSTNAME}",
},
...
}

The environment variable PLT_SERVER_HOSTNAME should then be set to 0.0.0.0 +in your hosting environment.

Security considerations

We recommend disabling the GraphiQL web UI in production. It can be disabled +with the following configuration:

platformatic.db.json
{
"db": {
...
"graphql": {
"graphiql": false
}
},
...
}

If you want to use this feature in development, replace the configuration +values with environment variable placeholders +so you can set it to true in development and false in production.

Removing the welcome page

If you want to remove the welcome page, you should register an index route.

module.exports = async function (app) {
// removing the welcome page
app.get('/', (req, reply) => {
return { hello: 'world' }
})
}

Databases

Applying migrations

If you're running a single instance of your application in production, it's best to allow Platformatic DB to automatically run migrations when the server starts. This reduces the chance of a currently running instance using a database structure it doesn't understand while the new version is still being deployed.

SQLite

When using an SQLite database, you can ensure you don’t commit it to your Git +repository by adding the SQLite database filename to your .gitignore file. +The SQLite database file will be automatically generated by Platformatic DB +when your application migrations are run in production.

+ + + + \ No newline at end of file diff --git a/docs/guides/dockerize-platformatic-app/index.html b/docs/guides/dockerize-platformatic-app/index.html new file mode 100644 index 00000000000..740cc36456f --- /dev/null +++ b/docs/guides/dockerize-platformatic-app/index.html @@ -0,0 +1,20 @@ + + + + + +Dockerize a Platformatic App | Platformatic Open Source Software + + + + + +
+
Version: 0.42.1

Dockerize a Platformatic App

This guide explains how to create a new Platformatic DB app, which connects to a PostgreSQL database.

We will then create a docker-compose.yml file that will run both services in separate containers

Generate a Platformatic DB App

Run this command in your terminal to start the Platformatic creator wizard:

npm create platformatic@latest

This interactive command-line tool will ask you some questions about how you'd +like to set up your new Platformatic project. For this guide, select these options:

- Which kind of project do you want to create?  => DB
- Where would you like to create your project? => quick-start
- Do you want to create default migrations? => Yes
- Do you want to create a plugin? => Yes
- Do you want to use TypeScript? => No
- Do you want to install dependencies? => Yes (this can take a while)
- Do you want to apply the migrations? => Yes
- Do you want to generate types? => Yes
- Do you want to create the github action to deploy this application to Platformatic Cloud dynamic workspace? => No
- Do you want to create the github action to deploy this application to Platformatic Cloud static workspace? => No

Once the wizard is complete, you'll have a Platformatic app project in the +folder quick-start, with example migration files and a plugin script.

info

Make sure you run the npm/yarn/pnpm install command manually if you don't ask the wizard to do it for you.

Create Docker image for the Platformatic DB App

In this step you are going to create some files into the root project directory

  • .dockerignore - This file tells Docker to ignore some files when copying the directory into the image filesystem
node_modules
.env*
  • start.sh - This is our entrypoint. We will run migrations then start platformatic
#!/bin/sh

echo "Running migrations..." && \
npx platformatic db migrations apply && \
echo "Starting Platformatic App..." && \
npm start
info

Make sure you make this file executable with the command chmod +x start.sh

  • Dockerfile - This is the file Docker uses to create the image
FROM node:18-alpine
WORKDIR /usr/src/app
COPY package*.json ./
RUN npm install
COPY . .
EXPOSE 3042
CMD [ "./start.sh" ]

At this point you can build your Docker image with the command

$ docker build -t platformatic-app .

Create Docker Compose config file

docker-compose.yml is the configuration file for docker-compose which will spin up containers for both PostgreSQL and our Platformatic App

version: "3.3"
services:
postgresql:
ports:
- "5433:5432"
image: "postgres:15-alpine"
environment:
- POSTGRES_PASSWORD=postgres
platformatic:
ports:
- "3042:3042"
image: 'platformatic-app:latest'
depends_on:
- postgresql
links:
- postgresql
environment:
PLT_SERVER_HOSTNAME: ${PLT_SERVER_HOSTNAME}
PORT: ${PORT}
PLT_SERVER_LOGGER_LEVEL: ${PLT_SERVER_LOGGER_LEVEL}
DATABASE_URL: postgres://postgres:postgres@postgresql:5432/postgres

A couple of things to notice:

  • The Platformatic app is started only once the database container is up and running (depends_on).
  • The Platformatic app is linked with postgresql service. Meaning that inside its container ping postgresql will be resolved with the internal ip of the database container.
  • The environment is taken directly from the .env file created by the wizard

You can now run your containers with

$ docker-compose up # (-d if you want to send them in the background)

Everything should start smoothly, and you can access your app pointing your browser to http://0.0.0.0:3042

To stop the app you can either press CTRL-C if you are running them in the foreground, or, if you used the -d flag, run

$ docker-compose down
+ + + + \ No newline at end of file diff --git a/docs/guides/generate-frontend-code-to-consume-platformatic-rest-api/index.html b/docs/guides/generate-frontend-code-to-consume-platformatic-rest-api/index.html new file mode 100644 index 00000000000..c81abd61712 --- /dev/null +++ b/docs/guides/generate-frontend-code-to-consume-platformatic-rest-api/index.html @@ -0,0 +1,32 @@ + + + + + +Generate Front-end Code to Consume Platformatic REST API | Platformatic Open Source Software + + + + + +
+
Version: 0.42.1

Generate Front-end Code to Consume Platformatic REST API

By default, a Platformatic app exposes REST APIs that provide CRUD (Create, Read, Update, Delete) functionality for each entity (see the Introduction to the REST API documentation for more information on the REST API).

The Platformatic CLI allows you to auto-generate the front-end code to import into your front-end application to consume the Platformatic REST API.

This guide

  • Explains how to create a new Platformatic app.
  • Explains how to configure the new Platformatic app.
  • Explains how to create a new React or Vue.js front-end application.
  • Explains how to generate the front-end TypeScript code to consume the Platformatic app REST API.
  • Provides some React and Vue.js components (either of them written in TypeScript) that read, create, and update an entity.
  • Explains how to import the new component in your front-end application.

Create a new Platformatic app

Run this command in your terminal to start the Platformatic creator wizard:

npm create platformatic@latest

This interactive command-line tool will ask you some questions about how you'd +like to set up your new Platformatic project. For this guide, select these options:

- Which kind of project do you want to create?  => DB
- Where would you like to create your project? => quick-start
- Do you want to create default migrations? => Yes
- Do you want to create a plugin? => Yes
- Do you want to use TypeScript? => No
- Do you want to install dependencies? => Yes (this can take a while)
- Do you want to apply the migrations? => Yes
- Do you want to generate types? => Yes
- Do you want to create the github action to deploy this application to Platformatic Cloud dynamic workspace? => No
- Do you want to create the github action to deploy this application to Platformatic Cloud static workspace? => No

Once the wizard is complete, you'll have a Platformatic app project in the +folder quick-start, with example migration files and a plugin script.

info

Make sure you run the npm/yarn/pnpm install command manually if you don't ask the wizard to do it for you.

Configure the new Platformatic app

Follow the steps in the previous section to create a new Platformatic app. Every Platformatic app uses the "Movie" demo entity and includes the corresponding table, migrations, and REST API to create, read, update, and delete movies.

Once the new Platformatic app is ready:

  • Set up CORS in platformatic.db.json
{
"$schema": "https://platformatic.dev/schemas/v0.24.0/db",
"server": {
"hostname": "{PLT_SERVER_HOSTNAME}",
"port": "{PORT}",
"logger": {
"level": "{PLT_SERVER_LOGGER_LEVEL}"
},
+ "cors": {
+ "origin": {
+ "regexp": "/*/"
+ }
+ }
},
...
}

You can find more details about the cors configuration here.

  • Launch Platformatic through npm start. Then, the Platformatic app should be available at the http://127.0.0.1:3042/ URL.

Create a new Front-end Application

Refer to the Scaffolding Your First Vite Project +documentation to create a new front-end application, and call it "rest-api-frontend".

info

Please note Vite is suggested only for practical reasons, but the bundler of choice does not make any difference.

If you are using npm 7+ you should run

npm create vite@latest rest-api-frontend -- --template react-ts

and then follow the Vite's instructions

Scaffolding project in /Users/noriste/Sites/temp/platformatic/rest-api-frontend...

Done. Now run:

cd rest-api-frontend
npm install
npm run dev

Once done, the front-end application is available at http://localhost:5174/.

Generate the front-end code to consume the Platformatic app REST API

Now that both the Platformatic app and the front-end app are running, go to the front-end codebase and run the Platformatic CLI

cd rest-api-frontend/src
npx platformatic frontend http://127.0.0.1:3042 ts

Refer to the Platformatic CLI frontend command +documentation to know about the available options.

The Platformatic CLI generates

  • api.d.ts: A TypeScript module that includes all the OpenAPI-related types. +Here is part of the generated code
interface GetMoviesRequest {
'limit'?: number;
'offset'?: number;
// ... etc.
}

interface GetMoviesResponseOK {
'id'?: number;
'title': string;
}


// ... etc.

export interface Api {
setBaseUrl(baseUrl: string): void;
getMovies(req: GetMoviesRequest): Promise<Array<GetMoviesResponseOK>>;
createMovie(req: CreateMovieRequest): Promise<CreateMovieResponseOK>;
// ... etc.
}
  • api.ts: A TypeScript module that includes a typed function for every single OpenAPI endpoint. +Here is part of the generated code
import type { Api } from './api-types'

let baseUrl = ''
export function setBaseUrl(newUrl: string) { baseUrl = newUrl };

export const createMovie: Api['createMovie'] = async (request) => {
const response = await fetch(`${baseUrl}/movies/`, {
method:'post',
body: JSON.stringify(request),
headers: {
'Content-Type': 'application/json'
}
})

if (!response.ok) {
throw new Error(await response.text())
}

return await response.json()
}

// etc.

You can add a --name option to the command line to provide a custom name for the generated files.

cd rest-api-frontend/src
npx platformatic frontend --name foobar http://127.0.0.1:3042 ts

will generate foobar.ts and foobar-types.d.ts

React and Vue.js components that read, create, and update an entity

You can copy/paste the following React or Vue.js components that import the code +the Platformatic CLI generated.

Create a new file src/PlatformaticPlayground.tsx and copy/paste the following code.

import { useEffect, useState } from 'react'

// getMovies, createMovie, and updateMovie are all functions automatically generated by Platformatic
// in the `api.ts` module.
import { getMovies, createMovie, updateMovie, setBaseUrl } from './api'

setBaseUrl('http://127.0.0.1:3042') // configure this according to your needs

export function PlatformaticPlayground() {
const [movies, setMovies] = useState<Awaited<ReturnType<typeof getMovies>>>([])
const [newMovie, setNewMovie] = useState<Awaited<ReturnType<typeof createMovie>>>()

async function onCreateMovie() {
const newMovie = await createMovie({ title: 'Harry Potter' })
setNewMovie(newMovie)
}

async function onUpdateMovie() {
if (!newMovie || !newMovie.id) return

const updatedMovie = await updateMovie({ id: newMovie.id, title: 'The Lord of the Rings' })
setNewMovie(updatedMovie)
}

useEffect(() => {
async function fetchMovies() {
const movies = await getMovies({})
setMovies(movies)
}

fetchMovies()
}, [])

return (
<>
<h2>Movies</h2>

{movies.length === 0 ? (
<div>No movies yet</div>
) : (
<ul>
{movies.map((movie) => (
<li key={movie.id}>{movie.title}</li>
))}
</ul>
)}

<button onClick={onCreateMovie}>Create movie</button>
<button onClick={onUpdateMovie}>Update movie</button>

{newMovie && <div>Title: {newMovie.title}</div>}
</>
)
}

Import the new component in your front-end application

You need to import and render the new component in the front-end application.

Change the App.tsx as follows

import { useState } from 'react'
import reactLogo from './assets/react.svg'
import viteLogo from '/vite.svg'
import './App.css'

+import { PlatformaticPlayground } from './PlatformaticPlayground'

function App() {
const [count, setCount] = useState(0)

return (
<>
+ <PlatformaticPlayground />
<div>
<a href="https://vitejs.dev" target="_blank">
<img src={viteLogo} className="logo" alt="Vite logo" />
</a>
<a href="https://react.dev" target="_blank">
<img src={reactLogo} className="logo react" alt="React logo" />
</a>
</div>
<h1>Vite + React</h1>
<div className="card">
<button onClick={() => setCount((count) => count + 1)}>count is {count}</button>
<p>
Edit <code>src/App.tsx</code> and save to test HMR
</p>
</div>
<p className="read-the-docs">Click on the Vite and React logos to learn more</p>
</>
)
}

export default App

Have fun

At the top of the front-end application, the new component requests the movies from the Platformatic app and lists them.

Platformatic frontend guide: listing the movies

Click on "Create movie" to create a new movie called "Harry Potter".

Platformatic frontend guide: creating a movie

Click on "Update movie" to rename "Harry Potter" into "Lord of the Rings".

Platformatic frontend guide: editing a movie

Reload the front-end application to see the new "Lord of the Rings" movie listed.

Platformatic frontend guide: listing the movies +.

+ + + + \ No newline at end of file diff --git a/docs/guides/jwt-auth0/index.html b/docs/guides/jwt-auth0/index.html new file mode 100644 index 00000000000..6b4f9dff5a2 --- /dev/null +++ b/docs/guides/jwt-auth0/index.html @@ -0,0 +1,21 @@ + + + + + +Configure JWT with Auth0 | Platformatic Open Source Software + + + + + +
+
Version: 0.42.1

Configure JWT with Auth0

Auth0 is a powerful authentication and authorization service provider that can be integrated with Platformatic DB through JSON Web Tokens (JWT) tokens. When a user is authenticated, Auth0 creates a JWT token with all necessary security information and custom claims (like the X-PLATFORMATIC-ROLE, see User Metadata) and signs the token.

Platformatic DB needs the correct public key to verify the JWT signature. The fastest way is to leverage JWKS, since Auth0 exposes a JWKS endpoint for each tenant. Given an Auth0 tenant's issuer URL, the (public) keys are accessible at ${issuer}/.well-known/jwks.json. For instance, if issuer is: https://dev-xxx.us.auth0.com/, the public keys are accessible at https://dev-xxx.us.auth0.com/.well-known/jwks.json

To configure Platformatic DB authorization to use JWKS with Auth0, set:


...
"authorization": {
"jwt": {
"jwks": {
"allowedDomains": [
"https://dev-xxx.us.auth0.com/"
]
}
},
}
...

danger

Note that specifying allowedDomains is critical to correctly restrict the JWT, which MUST be issued from one of the allowed domains.

Custom Claim Namespace

In Auth0 there are restrictions on the custom claims that can be set on access tokens. One of these is that the custom claims MUST be namespaced, i.e. we cannot have X-PLATFORMATIC-ROLE but we must specify a namespace, e.g.: https://platformatic.dev/X-PLATFORMATIC-ROLE

To map these claims to user metadata removing the namespace, we can specify the namespace in the JWT options:

...
"authorization": {
"jwt": {
"namespace": "https://platformatic.dev/",
"jwks": {
"allowedDomains": [
"https://dev-xxx.us.auth0.com/"
]
}
},
}
...

With this configuration, the https://platformatic.dev/X-PLATFORMATIC-ROLE claim is mapped to X-PLATFORMATIC-ROLE user metadata.

+ + + + \ No newline at end of file diff --git a/docs/guides/migrating-express-app-to-platformatic-service/index.html b/docs/guides/migrating-express-app-to-platformatic-service/index.html new file mode 100644 index 00000000000..aed062610b5 --- /dev/null +++ b/docs/guides/migrating-express-app-to-platformatic-service/index.html @@ -0,0 +1,17 @@ + + + + + +Migrating an Express app to Platformatic Service | Platformatic Open Source Software + + + + + +
+
Version: 0.42.1

Migrating an Express app to Platformatic Service

Introduction

Our open-source tools are built on top of the modern and flexible Fastify web framework. It provides logging, request validation and a powerful plugin system out-of-the-box, as well as incredible performance.

If you have an existing Express application, migrating it to Fastify could potentially be time consuming, and might not be something that you're able to prioritise right now. You can however still take advantage of Fastify and our open-source tools. In this guide you'll learn how to use the @fastify/express plugin to help you rapidly migrate your existing Express application to use Platformatic Service.

This guide assumes that you have some experience building applications with the Express framework.

Example Express application

For the purpose of this guide, we have a basic example Express application. Although this app has a specific structure, the migration steps covered in this guide can generally be applied to any Express application.

The code for the example Express and migrated Platformatic Service applications is available on GitHub.

Here's the structure of the example Express application:

├── app.js
├── package.json
├── routes
│ └── users.js
└── server.js

It has the following dependencies:

// package.json

"dependencies": {
"express": "^4.18.2"
}

The application has routes in routes/users.js:

// routes/users.js

import express from 'express'

const router = express.Router()

router.use(express.json())

router.post('/', function createUser(request, response, next) {
const newUser = request.body

if (!newUser) {
return next(new Error('Error creating user'))
}

response.status(201).json(newUser)
})

router.get('/:user_id', function getUser(request, response, next) {
const user = {
id: Number(request.params.user_id),
first_name: 'Bobo',
last_name: 'Oso'
}

response.json(user)
})

export const usersRoutes = router

In app.js, we have a factory function that creates a new Express server instance and mounts the routes:

// app.js

import express from 'express'

import { usersRoutes } from './routes/users.js'

export default function buildApp() {
const app = express()

app.use('/users', usersRoutes)

return app
}

And in server.js we're calling the factory function and starting the server listening for HTTP requests:

// server.js

import buildApp from './app.js'

const express = buildApp()

express.listen(3042, () => {
console.log('Example app listening at http://localhost:3042')
})

The routes in your Express application should be mounted on an Express router (or multiple routers if needed). This will allow them to be mounted using @fastify/express when you migrate your app to Platformatic Service.

Creating a new Platformatic Service app

To migrate your Express app to Platformatic Service, create a new Platformatic Service app with:

npm create platformatic@latest

Be sure to select Service as the project type. You should also say yes when you're asked if you want to create the GitHub Actions workflows for deploying your application to Platformatic Cloud.

Once the project has been created, you can delete the example plugins and routes directories.

Using ES modules

If you're using ES modules in the Express application code that you'll be migrating, ensure that there's a type field in package.json set to module:

npm pkg set type=module

Migrate the Express routes

Copy over the routes directory from your Express app.

Install @fastify/express

Install the @fastify/express Fastify plugin to add full Express compatibility to your Platformatic Service app:

npm install @fastify/express

Mounting the Express routes

Create a root Fastify plugin that registers the @fastify/express plugin and loads your Express routes:

// root-plugin.js

import { usersRoutes } from './routes/users.js'

/** @param {import('fastify').FastifyInstance} app */
export default async function (app) {
await app.register(import('@fastify/express'))

app.use('/users', usersRoutes)
}

Configuring the Platformatic Service app

Edit your app's platformatic.service.json to load your root plugin:

// platformatic.service.json

{
...,
"plugins": {
"paths": [{
"path": "./root-plugin.js",
"encapsulate": false
}],
"hotReload": false
},
"watch": false
}

These settings are important when using @fastify/express in a Platformatic Service app:

  • encapsulate — You'll need to disable encapsulation for any Fastify plugin which mounts Express routes. This is due to the way that @fastify/express works.
  • hotReload and watch — You'll need to disable hot reloading and watching for your app, as they don't currently work when using @fastify/express. This is a known issue that we're working to fix.

Wrapping up

You can learn more about building Node.js apps with Platformatic service in the Platformatic Service documentation.

Once you've migrated your Express app to use Platformatic Service with @fastify/express, you might then want to consider fully migrating your Express routes and application code to Fastify. This tutorial shows how you can approach that migration process: How to migrate your app from Express to Fastify (video).

+ + + + \ No newline at end of file diff --git a/docs/guides/migrating-fastify-app-to-platformatic-service/index.html b/docs/guides/migrating-fastify-app-to-platformatic-service/index.html new file mode 100644 index 00000000000..0fde64d0ebb --- /dev/null +++ b/docs/guides/migrating-fastify-app-to-platformatic-service/index.html @@ -0,0 +1,17 @@ + + + + + +Migrating a Fastify app to Platformatic Service | Platformatic Open Source Software + + + + + +
+
Version: 0.42.1

Migrating a Fastify app to Platformatic Service

Introduction

Building production ready Node.js application with Fastify can require a certain amount of boilerplate code. This is a side effect of some of Fastify's technical principles:

  • If it can be a plugin, it should be a plugin — Plugins help with the separation of concerns, they improve testability, and also provide a way to logically organise and structure your applications.
  • Developer choice = developer freedom — Fastify only applies a few strong opinions, in key areas such as logging and validation. The framework features have been designed to give you the freedom to build your applications however you want.
  • You know your needs best — Fastify doesn't make assumptions about what plugins you'll need in your application. As the Fastify plugin ecosystem and the community has grown, a clear group of popular plugin choices has emerged.

Platformatic Service is the natural evolution of the build-it-from-scratch Fastify development experience. It provides a solid foundation for building Node.js applications on top of Fastify, with best practices baked in.

See the Building apps with Platformatic Service section of this guide to learn more about the built-in features.

The good news is that the path to migrate a Fastify application to use Platformatic Service is fairly straightforward. This guide covers some of the things you'll need to know when migrating an application, as well as tips on different migration approaches.

This guide assumes that you have some experience building applications with the Fastify framework. If you'd like to learn more about building web applications with Fastify, we recommend taking a look at:

Example Fastify application

For the purpose of this guide, we have a basic example Fastify application. Although this app has a specific structure, the migration steps covered in this guide can generally be applied to any Fastify application.

The code for the example Fastify and migrated Platformatic Service applications is available on GitHub.

Here's the structure of the example Fastify application:

├── app.js
├── package.json
├── plugins
│   └── data-source.js
├── routes
│   ├── movies.js
│   └── quotes.js
├── server.js
└── test
└── routes.test.js

It has the following dependencies:

// package.json

"dependencies": {
"fastify": "^4.17.0",
"fastify-plugin": "^4.5.0"
}

The application has a plugin that decorates the Fastify server instance, as well as two Fastify plugins which define API routes. Here's the code for them:

// plugins/data-source.js

import fastifyPlugin from 'fastify-plugin'

/** @param {import('fastify').FastifyInstance} app */
async function dataSource (app) {
app.decorate('movies', [
'Jaws',
'Star Wars',
'The Wizard of Oz'
])

app.decorate('quotes', [
'You\'re gonna need a bigger boat.',
'May the Force be with you.',
'Toto, I\'ve got a feeling we\'re not in Kansas anymore.'
])
}

export default fastifyPlugin(dataSource)

fastify-plugin is used to prevent Fastify from creating a new encapsulation context for the plugin. This makes the decorators that are registered in the dataSource plugin available in the route plugins. You can learn about this fundamental Fastify concept in the Fastify Encapsulation documentation.

// routes/movies.js

/** @param {import('fastify').FastifyInstance} app */
export default async function movieRoutes (app) {
app.get('/', async (request, reply) => {
return app.movies
})
}
// routes/quotes.js

/** @param {import('fastify').FastifyInstance} app */
export default async function quotesRoutes (app) {
app.get('/', async (request, reply) => {
return app.quotes
})
}

The route plugins aren't registering anything that needs to be available in other plugins. They have their own encapsulation context and don't need to be wrapped with fastify-plugin.

There's also a buildApp() factory function in app.js, which takes care of creating a new Fastify server instance and registering the plugins and routes:

// app.js

import fastify from 'fastify'

export async function buildApp (options = {}) {
const app = fastify(options)

app.register(import('./plugins/data-source.js'))

app.register(import('./routes/movies.js'), { prefix: '/movies' })
app.register(import('./routes/quotes.js'), { prefix: '/quotes' })

return app
}

And server.js, which calls the buildApp function to create a new Fastify server, and then starts it listening:

// server.js

import { buildApp } from './app.js'

const port = process.env.PORT || 3042
const host = process.env.HOST || '127.0.0.1'

const options = {
logger: {
level: 'info'
}
}

const app = await buildApp(options)

await app.listen({ port, host })

As well as a couple of tests for the API routes:

// tests/routes.test.js

import { test } from 'node:test'
import assert from 'node:assert/strict'

import { buildApp } from '../app.js'

test('Basic API', async (t) => {
const app = await buildApp()

t.after(async () => {
await app.close()
})

await t.test('GET request to /movies route', async () => {
const response = await app.inject({
method: 'GET',
url: '/movies'
})

assert.equal(response.statusCode, 200)
assert.deepEqual(response.json(), [
'Jaws',
'Star Wars',
'The Wizard of Oz'
])
})

await t.test('GET request to /quotes route', async () => {
const response = await app.inject({
method: 'GET',
url: '/quotes'
})

assert.equal(response.statusCode, 200)
assert.deepEqual(response.json(), [
'You\'re gonna need a bigger boat.',
'May the Force be with you.',
'Toto, I\'ve got a feeling we\'re not in Kansas anymore.'
])
})
})

These tests are using the built in Node.js test runner, node:test. They can be run with the command: node --test --test-reporter=spec test/*.test.js.

The @param lines in this application code are JSDoc blocks that import the FastifyInstance type. This allows many code editors to provide auto-suggest, type hinting and type checking for your code.

Creating a new Platformatic Service app

To migrate your Fastify app to Platformatic Service, create a new Platformatic Service app with:

npm create platformatic@latest

Be sure to select Service as the project type. Once the project has been created, you can delete the example plugins and routes directories.

App configuration

The configuration for the Platformatic Service app is stored in platformatic.service.json.

The generated configuration is set up to load plugins from the plugins and routes directories:

// platformatic.service.json

"plugins": {
"paths": [
"./plugins",
"./routes"
]
}

The value for any configuration setting in platformatic.service.json can be replaced with an environment variable by adding a placeholder, for example {PLT_SERVER_LOGGER_LEVEL}. In development, environment variables are automatically loaded by your Platformatic Service app from a .env file in the root directory of your app. In production, you'll typically set these environment variables using a feature provided by your hosting provider.

See the Platformatic Service documentation for Environment variable placeholders to learn more about how this works.

Using ES modules

If you're using ES modules in the Fastify application code that you'll be migrating, ensure that there's a type field in package.json set to module:

npm pkg set type=module

Refactoring Fastify server factories

If your Fastify application has a script with a factory function to create and build up a Fastify server instance, you can refactor it into a Fastify plugin and use it in your Platformatic Service app.

Here are a few things to consider while refactoring it:

  • Move the options you're passing to Fastify when creating a new server instance to the server block in platformatic.service.json. These options will be passed through directly by Platformatic Service when it creates a Fastify server instance.
  • You can create a root plugin to be loaded by your Platformatic Service app, for example: export default async function rootPlugin (app, options) { ... }
  • When you copy the code from your factory function into your root plugin, remove the code which is creating the Fastify server instance.
  • You can configure your Platformatic Service to load the root plugin, for example:
    "plugins": {
    "paths": ["./root-plugin.js"]
    }
  • If you need to pass options to your root plugin, you can do it like this:
    "plugins": {
    "paths": [
    {
    "path": "./root-plugin.js",
    "options": {
    "someOption": true
    }
    }
    ]
    }

Migrating plugins

Copy over the plugins directory from your Fastify app. You shouldn't need to make any modifications for them to work with Platformatic Service.

Disabling plugin encapsulation

Platformatic Service provides a configuration setting which enables you to disable encapsulation for a plugin, or all the plugins within a directory. This will make any decorators or hooks that you set in those plugins available to all other plugins. This removes the need for you to wrap your plugins with fastify-plugin.

To disable encapsulation for all plugins within the plugins directory, you would set your plugins configuration like this in platformatic.service.json:

// platformatic.service.json

"plugins": {
"paths": [
{
"path": "./plugins",
"encapsulate": false
},
"./routes"
]
}

You can learn more about plugin encapsulation in the Fastify Plugins Guide.

Migrating routes

Copy over the routes directory from your Fastify app.

Explicit route paths

If you're registering routes in your Fastify application with full paths, for example /movies, you won't need to make any changes to your route plugins.

Route prefixing with file-system based routing

If you're using the prefix option when registering route plugins in your Fastify application, for example:

app.register(import('./routes/movies.js'), { prefix: '/movies' })

You can achieve the same result with Platformatic Service by using file-system based routing. With the following directory and file structure:

routes/
├── movies
│   └── index.js
└── quotes
└── index.js

Assuming that both of the route files register a / route, these are the route paths that will be registered in your Platformatic Service app:

/movies
/quotes

With the example Fastify application, this would mean copying the route files over to these places in the Platformatic Service app:

routes/movies.js -> routes/movies/index.js
routes/quotes.js -> routes/quotes/index.js

How does this work? Plugins are loaded with the @fastify/autoload Fastify plugin. The dirNameRoutePrefix plugin option for @fastify/autoload is enabled by default. This means that "routes will be automatically prefixed with the subdirectory name in an autoloaded directory".

If you'd prefer not to use file-system based routing with Platformatic Service, you can add prefixes to the paths for the routes themselves (see Explicit route paths).

Adapting existing usage of @fastify/autoload

If you're using @fastify/autoload in your Fastify application, there are a couple of approaches you can take when migrating the app to Platformatic Service:

  • Configure plugins in your Platformatic Service app's platformatic.service.json. It will then take care of loading your routes and plugins for you with @fastify/autoload (configuration documentation).
  • You can continue to use @fastify/autoload directly with a little refactoring. See the tips in the Refactoring Fastify server factories section.

Migrating tests

You'll generally use the Platformatic CLI to start your Platformatic Service app (npx platformatic start). However for testing, you can use the programmatic API provided by Platformatic Service. This allows you to load your app in your test scripts and then run tests against it.

If you copy over the tests from your existing Fastify app, they will typically only require a small amount of refactoring to work with Platformatic Service.

Replacing your Fastify server factory function

The example Fastify app has a buildApp() factory function which creates a Fastify server instance. The import line for that function can be removed from tests/routes.test.js:

// tests/routes.test.js

import { buildApp } from '../app.js'

And replaced with an import of the buildServer() function from @platformatic/service:

// tests/routes.test.js

import { buildServer } from '@platformatic/service'

You can then load your Platformatic Service app like this:


const app = await buildServer('./platformatic.service.json')

Disabling server logging in your tests

If you have logging enabled for your Platformatic Service app, you'll probably want to disable the logging in your tests to remove noise from the output that you receive when you run your tests.

Instead of passing the path to your app's configuration to buildServer(), you can import the app configuration and disable logging:

// tests/routes.test.js

import serviceConfig from '../platformatic.service.json' assert { type: 'json' }

serviceConfig.server.logger = false

Then pass that serviceConfig configuration object to the buildServer() function:

// tests/routes.test.js

const app = await buildServer(serviceConfig)

Import assertions — the assert { type: 'json' } syntax — are not a stable feature of the JavaScript language, so you'll receive warning messages from Node.js when running your tests. You can disable these warnings by passing the --no-warnings flag to node.

Building apps with Platformatic Service

Because Platformatic Service is built on top of the Fastify framework, you're able to use the full functionality of the Fastify framework in your Platformatic Service app. This includes:

  • Fast, structured logging, provided by Pino
  • Request validation with JSON Schema and Ajv (other validation libraries are supported too)
  • Hooks, which allow fine grained control over when code is run during the request/response lifecycle.
  • Decorators, which allow you to customize core Fastify objects and write more modular code.

Platformatic Service also provides many other features that are built on top of Fastify.

Application features

All Platformatic Service features are fully configurable via platformatic.service.json.

Development features

  • Hot reloading — Your server will automatically reload in development as you develop features.
  • Write your plugins in JavaScript or TypeScript — TypeScript support is provided out-of-the-box and supports hot reloading.
  • Pretty printed logs — Making it easier to understand and debug your application during development.

See the Platformatic Service Configuration documentation for all of the features which can be configured.

Next steps

The documentation for Platformatic Service is a helpful reference when building a Platformatic Service app.

Watch: Understand the parts of a Platformatic app

You want to be confident that you understand how your applications work. In this video you'll learn about the parts that make up a Platformatic application, what each part does, and how they fit together.

Our series of Platformatic How-to videos can help get you up and running building apps with Platformatic open-source tools.

Got questions or need help migrating your Fastify app to use Platformatic Service? Drop by our Discord server and we'll be happy to help you.

+ + + + \ No newline at end of file diff --git a/docs/guides/monitoring/index.html b/docs/guides/monitoring/index.html new file mode 100644 index 00000000000..f255ff9032c --- /dev/null +++ b/docs/guides/monitoring/index.html @@ -0,0 +1,24 @@ + + + + + +Monitoring with Prometheus and Grafana | Platformatic Open Source Software + + + + + +
+
Version: 0.42.1

Monitoring with Prometheus and Grafana

Prometheus is an open source systems monitoring and alerting toolkit. It's a time series database that collects metrics from configured targets at given intervals, evaluates rule expressions, displays the results, and can trigger alerts if some condition is observed to be true. +Grafana is open source visualization and analytics software.

It's a pretty common solution to use Prometheus to collect and store monitoring data, and Grafana to visualize it.

Platformatic can be configured to expose Prometheus metrics:

...
"metrics": {
"port": 9091,
"auth": {
"username": "platformatic",
"password": "mysecret"
}
}
...

In this case, we are exposing the metrics on port 9091 (defaults to 9090), and we are using basic authentication to protect the endpoint. +We can also specify the IP address to bind to (defaults to 0.0.0.0). +Note that the metrics port is not the default in this configuration. This is because if you want to test the integration running both Prometheus and Platformatic on the same host, Prometheus starts on port 9090 too. +All the configuration settings are optional. To use the default settings, set "metrics": true. See the configuration reference for more details.

caution

Use environment variable placeholders in your Platformatic DB configuration file to avoid exposing credentials.

Prometheus Configuration

This is an example of a minimal Prometheus configuration to scrape the metrics from Platformatic:

global:
scrape_interval: 15s
scrape_timeout: 10s
evaluation_interval: 1m
scrape_configs:
- job_name: 'platformatic'
scrape_interval: 2s
metrics_path: /metrics
scheme: http
basic_auth:
username: platformatic
password: mysecret
static_configs:
- targets: ['192.168.69.195:9091']
labels:
group: 'platformatic'

We specify a target configuring the IP address and the port where Platformatic is running, and we specify the username and password to use for basic authentication. The metrics path is the one used by Platformatic. The IP address is not a loopback address, so this will work even with Prometheus running in Docker on the same host (see below); please change it to your host IP.

To test this configuration, we can run Prometheus locally using docker and docker-compose, so please be sure to have both correctly installed. +Save the above configuration in a file named ./prometheus/prometheus.yml and create a docker-compose.yml:

version: "3.7"

services:
prometheus:
image: prom/prometheus:latest
volumes:
- prometheus_data:/prometheus
- ./prometheus/prometheus.yml:/etc/prometheus/prometheus.yml
command:
- '--config.file=/etc/prometheus/prometheus.yml'
ports:
- '9090:9090'

volumes:
prometheus_data: {}

Then run docker-compose up -d and open http://localhost:9090 in your browser. You should see the Prometheus dashboard, and you can also query the metrics, e.g. {group="platformatic"}. See Prometheus docs for more information on querying and metrics.

Grafana Configuration

Let's see how we can configure Grafana to chart some Platformatics metrics from Prometheus. +Change the docker-compose.yml to add a grafana service:

version: "3.7"
services:

prometheus:
image: prom/prometheus:latest
volumes:
- prometheus_data:/prometheus
- ./prometheus/prometheus.yml:/etc/prometheus/prometheus.yml
command:
- '--config.file=/etc/prometheus/prometheus.yml'
ports:
- '9090:9090'

grafana:
image: grafana/grafana:latest
volumes:
- grafana_data:/var/lib/grafana
environment:
- GF_SECURITY_ADMIN_PASSWORD=pleasechangeme
depends_on:
- prometheus
ports:
- '3000:3000'

volumes:
prometheus_data: {}
grafana_data: {}

In Grafana, select Configuration -> Data Sources -> Add Data Source, and select Prometheus. +In the URL field, specify the URL of the Prometheus server, e.g. http://prometheus:9090 (the name of the service in the docker-compose file), then Save & Test.

Now we can create a dashboard and add panels to it. Select the Prometheus data source, and add queries. You should see the metrics exposed by Platformatic.

It's also possible to import pre-configured dashboards, like this one from Grafana.com.

+ + + + \ No newline at end of file diff --git a/docs/guides/packaging-an-application-as-a-module/index.html b/docs/guides/packaging-an-application-as-a-module/index.html new file mode 100644 index 00000000000..7493577838e --- /dev/null +++ b/docs/guides/packaging-an-application-as-a-module/index.html @@ -0,0 +1,27 @@ + + + + + +Packaging a Platformatic Application as a module | Platformatic Open Source Software + + + + + +
+
Version: 0.42.1

Packaging a Platformatic Application as a module

Platformatic Service and Platformatic DB +offer a good starting point to create new applications. However, most developers or organizations might want to +create reusable services or applications built on top of Platformatic. +This is useful to publish the application on the public npm registry (or a private one!), including building your own CLI, +or to create a specialized template for your organization to allow for centralized bugfixes and updates.

This process is the same one we use to maintain Platformatic DB and Platformatic Composer on top of Platformatic Service.

Creating a custom Service

We are creating the module foo.js as follows:

const { schema, platformaticService } = require('@platformatic/service')

/** @type {import('fastify').FastifyPluginAsync<{}>} */
async function foo (app, opts) {
const text = app.platformatic.config.foo.text
app.get('/foo', async (request, reply) => {
return text
})

await platformaticService(app, opts)
}

foo.configType = 'foo'

// break Fastify encapsulation
foo[Symbol.for('skip-override')] = true

// The schema for our configuration file
foo.schema = {
$id: 'https://example.com/schemas/foo.json',
title: 'Foo Service',
type: 'object',
properties: {
server: schema.server,
plugins: schema.plugins,
metrics: schema.metrics,
watch: {
anyOf: [schema.watch, {
type: 'boolean'
}, {
type: 'string'
}]
},
$schema: {
type: 'string'
},
module: {
type: 'string'
},
foo: {
type: 'object',
properties: {
text: {
type: 'string'
}
},
required: ['text']
}
},
additionalProperties: false,
required: ['server']
}

// The configuration for the ConfigManager
foo.configManagerConfig = {
schema: foo.schema,
envWhitelist: ['PORT', 'HOSTNAME'],
allowToWatch: ['.env'],
schemaOptions: {
useDefaults: true,
coerceTypes: true,
allErrors: true,
strict: false
}
}

module.exports = foo

Note that the $id property of the schema identifies the module in our system, +allowing us to retrieve the schema correctly. +It is recommended, but not required, that the JSON schema is actually +published in this location. Doing so allows tooling such as the VSCode +language server to provide autocompletion.

In this example, the schema adds a custom top-level foo property +that users can use to configure this specific module.

ESM is also supported.

Consuming a custom application

Consuming foo.js is simple. We can create a platformatic.json file as follows:

{
"$schema": "https://example.com/schemas/foo.json",
"module": "./foo",
"server": {
"port": 0,
"hostname": "127.0.0.1"
},
"foo": {
"text": "Hello World"
}
}

Note that we must specify both the $schema property and module. +Module can also be any modules published on npm and installed via your package manager.

Building your own CLI

It is possible to build your own CLI with the following cli.mjs file:

import foo from './foo.js'
import { start } from '@platformatic/service'
import { printAndExitLoadConfigError } from '@platformatic/config'

await start(foo, process.argv.splice(2)).catch(printAndExitLoadConfigError)

This will also load platformatic.foo.json files.

+ + + + \ No newline at end of file diff --git a/docs/guides/prisma/index.html b/docs/guides/prisma/index.html new file mode 100644 index 00000000000..bafd5be86c6 --- /dev/null +++ b/docs/guides/prisma/index.html @@ -0,0 +1,17 @@ + + + + + +Integrate Prisma with Platformatic DB | Platformatic Open Source Software + + + + + +
+
Version: 0.42.1

Integrate Prisma with Platformatic DB

Prisma is an open-source ORM for Node.js and TypeScript. It is used as an alternative to writing SQL, or using another database access tool such as SQL query builders (like knex.js) or ORMs (like TypeORM and Sequelize). Prisma currently supports PostgreSQL, MySQL, SQL Server, SQLite, MongoDB, and CockroachDB.

Prisma can be used with JavaScript or TypeScript, and provides a level of type-safety that goes beyond the guarantees made by other ORMs in the TypeScript ecosystem. You can find an in-depth comparison of Prisma against other ORMs here.

If you want to get a quick overview of how Prisma works, you can follow the Quickstart or read the Introduction in the Prisma documentation.

How Prisma can improve your workflow with Platformatic DB

While Platformatic speeds up development of your REST and GraphQL APIs, Prisma can complement the workflow in several ways:

  1. Provides an intuitive data modeling language
  2. Provides auto-generated and customizable SQL migrations
  3. Provides type-safety and auto-completion for your database queries

You can learn more about why Prisma and Platformatic are a great match in this article.

Prerequisites

To follow along with this guide, you will need to have the following:

Setup Prisma

Install the Prisma CLI and the db-diff development dependencies in your project:

npm install --save-dev prisma @ruheni/db-diff

Next, initialize Prisma in your project

npx prisma init

This command does the following:

  • Creates a new directory called prisma which contains a file called schema.prisma. This file defines your database connection and the Prisma Client generator.
  • Creates a .env file at the root of your project if it doesn't exist. This defines your environment variables (used for your database connection).

You can specify your preferred database provider using the --datasource-provider flag, followed by the name of the provider:

npx prisma init --datasource-provider postgresql # or sqlite, mysql, sqlserver, cockroachdb

Prisma uses the DATABASE_URL environment variable to connect to your database to sync your database and Prisma schema. It also uses the variable to connect to your database to run your Prisma Client queries.

If you're using PostgreSQL, MySQL, SQL Server, or CockroachDB, ensure that the DATABASE_URL used by Prisma is the same as the one used by Platformatic DB project. If you're using SQLite, refer to the Using Prisma with SQLite section.

If you have an existing project, refer to the Adding Prisma to an existing Platformatic DB project section. If you're adding Prisma to a new project, refer to the Adding Prisma to a new project.

Adding Prisma to an existing project

If you have an existing Platformatic DB project, you can introspect your database and generate the data model in your Prisma schema with the following command:

npx prisma db pull

The command will introspect your database and generate the data model

Next, add the @@ignore attribute to the versions model to exclude it from the Prisma Client API:

model versions {
version BigInt @id
name String?
md5 String?
run_at DateTime? @db.Timestamptz(6)

+ @@ignore
}

To learn how you can evolve your database schema, you can jump to the Evolving your database schema section.

Adding Prisma to a new project

Define a Post model with the following fields at the end of your schema.prisma file:

prisma/schema.prisma
model Post {
id Int @id @default(autoincrement())
title String
content String?
published Boolean @default(false)
viewCount Int @default(0)
createdAt DateTime @default(now())

@@map("posts")
}

The snippet above defines a Post model with the following fields and properties:

  • id: An auto-incrementing integer that will be the primary key for the model.
  • title: A non-nullable String field.
  • content: A nullable String field.
  • published: A Boolean field with a default value of false.
  • viewCount: An Int field with a default value of 0.
  • createdAt: A DateTime field with a timestamp of when the value is created as its default value.

By default, Prisma maps the model name and its format to the table name — which is also used in Prisma Client. Platformatic DB uses a snake casing and pluralized table names to map your table names to the generated API. The @@map() attribute in the Prisma schema allows you to define the name and format of your table names to be used in your database. You can also use the @map() attribute to define the format for field names to be used in your database. Refer to the Foreign keys and table names naming conventions section to learn how you can automate formatting foreign keys and table names.

Next, run the following command to generate an up and down migration:

npx db-diff

The previous command will generate both an up and down migration based on your schema. The generated migration is stored in your ./migrations directory. If you are currently using a different path to store the migration, you can provide the --migrations-dir flag followed by the path.

You can then apply the generated migration using the Platformatic DB CLI:

npx platformatic db migrations apply

Platformatic uses Postgrator to run migrations. Postgrator creates a table in the database called versions to track the applied migrations. Since the versions table is not yet captured in the Prisma schema, run the following command to introspect the database and populate it with the missing model:

npx prisma db pull

Introspecting the database to populate the model prevents including the versions table in the generated down migrations.

Your Prisma schema should now contain a versions model that is similar to this one (it will vary depending on the database system you're using):

model Post {
id Int @id @default(autoincrement())
title String
content String?
published Boolean @default(false)
viewCount Int @default(0)
createdAt DateTime @default(now())

@@map("posts")
}

+model versions {
+ version BigInt @id
+ name String?
+ md5 String?
+ run_at DateTime? @db.Timestamptz(6)
+}

Add the @@ignore attribute function to the model to exclude it from the Prisma Client API:

model versions {
version BigInt @id
name String?
md5 String?
run_at DateTime? @db.Timestamptz(6)

+ @@ignore
}

Evolving your database schema

Update the data model in your Prisma schema by adding a model or a field:

// based on the schema in the "Adding Prisma to a new project" section
+model User {
+ id Int @id @default(autoincrement())
+ email String @unique
+ name String?
+ posts Post[]
+
+ @@map("users")
+}

model Post {
id Int @id @default(autoincrement())
createdAt DateTime @default(now())
title String
content String?
published Boolean @default(false)
viewCount Int @default(0)
+ author User? @relation(fields: [authorId], references: [id])
+ authorId Int? @map("author_id")

@@map("posts")
}

Next, use the @ruheni/db-diff CLI tool to generate up and down migrations:

npx db-diff

This command will generate up and down migrations based off of your Prisma schema. If you are currently using a different path to store the migration, you can provide the --migrations-dir flag followed by the path.

Next, apply the generated migration using the Platformatic CLI:

npx platformatic db migrations apply

And you're done!

Using Prisma Client in your plugins

Plugins allow you to add custom functionality to your REST and GraphQL API. Refer to the Add Custom Functionality guide to learn more about how you can add custom functionality.

danger

Prisma Client usage with Platformatic is currently only supported in Node v18

You can use Prisma Client to interact with your database in your plugin.

To get started, run the following command:

npx prisma generate

The above command installs the @prisma/client in your project and generates a Prisma Client based off of your Prisma schema.

Install @sabinthedev/fastify-prisma fastify plugin. The plugin takes care of shutting down database connections and makes Prisma Client available as a Fastify plugin.

npm install @sabinthedev/fastify-prisma

Register the plugin and extend your REST API:

// 1.
const prismaPlugin = require("@sabinthedev/fastify-prisma")

module.exports = async (app) => {
app.log.info('plugin loaded')

// 2.
app.register(prismaPlugin)

/**
* Plugin logic
*/
// 3.
app.put('/post/:id/views', async (req, reply) => {

const { id } = req.params

// 4.
const post = await app.prisma.post.update({
where: {
id: Number(id)
},
data: {
viewCount: {
increment: 1
}
}
})

// 5.
return reply.send(post)
})
}

The snippet does the following:

  1. Imports the plugin
  2. Registers the @sabinthedev/fastify-prisma
  3. Defines the endpoint for incrementing the views of a post
  4. Makes a query to the database on the Post model to increment a post's view count
  5. Returns the updated post on success

If you would like to extend your GraphQL API, extend the schema and define the corresponding resolver:

plugin.js
// ./plugin.js
const prismaPlugin = require("@sabinthedev/fastify-prisma")

module.exports = async (app) => {
app.log.info('plugin loaded')

app.graphql.extendSchema(`
extend type Mutation {
incrementPostViewCount(id: ID): Post
}
`)

app.graphql.defineResolvers({
Mutation: {
incrementPostViewCount: async (_, { id }) => {
const post = await prisma.post.update({
where: {
id: Number(id)
},
data: {
viewCount: {
increment: 1
}
}
})

if (!post) throw new Error(`Post with id:${id} was not found`)
return post
}
}
})
}

Start the server:

npx platformatic db start

The query should now be included in your GraphQL schema.

You can also use the Prisma Client in your REST API endpoints.

Workarounds

Using Prisma with SQLite

Currently, Prisma doesn't resolve the file path of a SQLite database the same way as Platformatic does.

If your database is at the root of the project, create a new environment variable that Prisma will use called PRISMA_DATABASE_URL:

# .env
DATABASE_URL="sqlite://db.sqlite"
PRISMA_DATABASE_URL="file:../db.sqlite"

Next, update the url value in the datasource block in your Prisma schema with the updated value:

prisma/schema.prisma
// ./prisma/schema.prisma
datasource db {
provider = "sqlite"
url = env("PRISMA_DATABASE_URL")
}

Running migrations should now work smoothly and the path will be resolved correctly.

Foreign keys, field, and table names naming conventions

Foreign key names should use underscores, e.g. author_id, for Platformatic DB to correctly map relations. You can use the @map("") attribute to define the names of your foreign keys and field names to be defined in the database.

Table names should be mapped to use the naming convention expected by Platformatic DB e.g. @@map("recipes") (the Prisma convention is Recipe, which corresponds with the model name).

You can use prisma-case-format to enforce your own database conventions, i.e., pascal, camel, and snake casing.

Learn more

If you would like to learn more about Prisma, be sure to check out the Prisma docs.

+ + + + \ No newline at end of file diff --git a/docs/guides/securing-platformatic-db/index.html b/docs/guides/securing-platformatic-db/index.html new file mode 100644 index 00000000000..88d9b26c913 --- /dev/null +++ b/docs/guides/securing-platformatic-db/index.html @@ -0,0 +1,31 @@ + + + + + +Securing Platformatic DB with Authorization | Platformatic Open Source Software + + + + + +
+
Version: 0.42.1

Securing Platformatic DB with Authorization

Introduction

Authorization in Platformatic DB is role-based. User authentication and the +assignment of roles must be handled by an external authentication service. +Take a look at the reference documentation for Authorization.

The goal of this simple guide is to protect an API built with Platformatic DB +with the use of a shared secret, that we call adminSecret. We want to prevent +any user that is not an admin to access the data.

The use of an adminSecret is a simplistic way of securing a system. +It is a crude way for limiting access and not suitable for production systems, +as the risk of leaking the secret is high in case of a security breach. +A production friendly way would be to issue a machine-to-machine JSON Web Token, +ideally with an asymmetric key. Alternatively, you can defer to an external +service via a Web Hook.

Please refer to our guide to set up Auth0 for more information +on JSON Web Tokens.

Block access to all entities, allow admins

The following configuration will block all anonymous users (e.g. each user without a known role) +to access every entity:

{
...
"authorization": {
"adminSecret": "replaceWithSomethingRandomAndSecure"
}
}

The data will still be available if the X-PLATFORMATIC-ADMIN-SECRET HTTP header +is specified when making HTTP calls, like so:

curl -H 'X-PLATFORMATIC-ADMIN-SECRET: replaceWithSomethingRandomAndSecure' http://127.0.0.1:3042/pages
info

Configuring JWT or Web Hooks will have the same result as configuring an admin secret.

Authorization rules

Rules can be provided based on entity and role in order to restrict access and provide fine grained access. +To make an admin only query and save the page table / page entity using adminSecret this structure should be used in the platformatic.db configuration file:

  ...
"authorization": {
"adminSecret": "easy",
"rules": [{
"entity": "movie",
"role": "platformatic-admin",
"find": true,
"save": true,
"delete": false
}
]
}
info

Note that the role of an admin user from adminSecret strategy is platformatic-admin by default.

Read-only access to anonymous users

The following configuration will allow all anonymous users (e.g. each user without a known role) +to access the pages table / page entity in Read-only mode:

{
...
"authorization": {
"adminSecret": "replaceWithSomethingRandomAndSecure",
"rules": [{
"role": "anonymous",
"entity": "page",
"find": true,
"save": false,
"delete": false
}]
}
}

Note that we set find as true to allow the access, while the other options are false.

Work in Progress

This guide is a Work-In-Progress. Let us know what other common authorization use cases we should cover.

+ + + + \ No newline at end of file diff --git a/docs/guides/seed-a-database/index.html b/docs/guides/seed-a-database/index.html new file mode 100644 index 00000000000..8d36456ec2b --- /dev/null +++ b/docs/guides/seed-a-database/index.html @@ -0,0 +1,21 @@ + + + + + +Seed a Database | Platformatic Open Source Software + + + + + +
+
Version: 0.42.1

Seed a Database

A database is as useful as the data that it contains: a fresh, empty database +isn't always the best starting point. We can add a few rows from our migrations +using SQL, but we might need to use JavaScript from time to time.

The platformatic db seed command allows us to run a +script that will populate — or "seed" — our database.

Example

Our seed script should export a Function that accepts an argument: +an instance of @platformatic/sql-mapper.

seed.js
'use strict'

module.exports = async function ({ entities, db, sql }) {
await entities.graph.save({ input: { name: 'Hello' } })
await db.query(sql`
INSERT INTO graphs (name) VALUES ('Hello 2');
`)
}

We can then run the seed script with the Platformatic CLI:

npx platformatic db seed seed.js
+ + + + \ No newline at end of file diff --git a/docs/guides/telemetry/index.html b/docs/guides/telemetry/index.html new file mode 100644 index 00000000000..ecf0b6bfe8c --- /dev/null +++ b/docs/guides/telemetry/index.html @@ -0,0 +1,21 @@ + + + + + +Telemetry with Jaeger | Platformatic Open Source Software + + + + + +
+
Version: 0.42.1

Telemetry with Jaeger

Introduction

Platformatic supports Open Telemetry integration. This allows you to send telemetry data to one of the OTLP compatible servers (see here) or to a Zipkin server. Let's show this with Jaeger.

Jaeger setup

The quickest way is to use docker:

docker run -d --name jaeger \
-e COLLECTOR_OTLP_ENABLED=true \
-p 16686:16686 \
-p 4317:4317 \
-p 4318:4318 \
jaegertracing/all-in-one:latest

Check that the server is running by opening http://localhost:16686/ in your browser.

Platformatic setup

We will test this with a Platformatic Composer that proxies requests to a Platformatic Service, which in turn invokes a Platformatic DB Service. +In this way we show that the telemetry is propagated from the Composer throughout the services and collected correctly. +Let's set up all these components:

Platformatic DB Service

Create a folder for DB and cd into it:

mkdir test-db
cd test-db

Then create a db in the folder using npx create-platformatic@latest:

npx create-platformatic@latest

To make it simple, use sqlite and create/apply the default migrations. This DB Service is exposed on port 5042:


➜ npx create-platformatic@latest

Hello user, welcome to Platformatic 0.32.0!
Let's start by creating a new project.
? Which kind of project do you want to create? DB
? Where would you like to create your project? .
? What database do you want to use? SQLite
? Do you want to use the connection string "sqlite://./db.sqlite"? Confirm
? Do you want to create default migrations? yes
? Do you want to create a plugin? no
? Do you want to use TypeScript? no
? What port do you want to use? 5042
[15:40:46] INFO: Configuration file platformatic.db.json successfully created.
[15:40:46] INFO: Environment file .env successfully created.
[15:40:46] INFO: Migrations folder migrations successfully created.
[15:40:46] INFO: Migration file 001.do.sql successfully created.
[15:40:46] INFO: Migration file 001.undo.sql successfully created.
[15:40:46] INFO: Plugin file created at plugin.js
? Do you want to run npm install? no
? Do you want to apply migrations? yes
...done!
? Do you want to generate types? no
? Do you want to create the github action to deploy this application to Platformatic Cloud dynamic workspace? no
? Do you want to create the github action to deploy this application to Platformatic Cloud static workspace? no

All done! Please open the project directory and check the README.
We will test this in one example with a Platformatic Composer that proxies requests to a Platformatic Service, which in turn invokes a Platformatic DB.

Open the platformatic.db.json file and add the telemetry configuration:

  "telemetry": {
"serviceName": "test-db",
"exporter": {
"type": "otlp",
"options": {
"url": "http://localhost:4318/v1/traces"
}
}
}

Finally, start the DB service:

npx platformatic db start

Platformatic Service

Create at the same level of test-db another folder for Service and cd into it:

mkdir test-service
cd test-service

Then create a service on the 5043 port in the folder using npx create-platformatic@latest:

➜ npx create-platformatic@latest

Hello user, welcome to Platformatic 0.32.0!
Let's start by creating a new project.
? Which kind of project do you want to create? Service
? Where would you like to create your project? .
? Do you want to run npm install? no
? Do you want to use TypeScript? no
? What port do you want to use? 5043
[15:55:35] INFO: Configuration file platformatic.service.json successfully created.
[15:55:35] INFO: Environment file .env successfully created.
[15:55:35] INFO: Plugins folder "plugins" successfully created.
[15:55:35] INFO: Routes folder "routes" successfully created.
? Do you want to create the github action to deploy this application to Platformatic Cloud dynamic workspace? no
? Do you want to create the github action to deploy this application to Platformatic Cloud static workspace? no

Open the platformatic.service.json file and add the following telemetry configuration (it's exactly the same as DB, but with a different serviceName)

  "telemetry": {
"serviceName": "test-service",
"exporter": {
"type": "otlp",
"options": {
"url": "http://localhost:4318/v1/traces"
}
}
}

We want this service to invoke the DB service, so we need to add a client for test-db to it:

npx platformatic client http://127.0.0.1:5042 js --name movies

Check platformatic.service.json to see that the client has been added (PLT_MOVIES_URL is defined in .env):

    "clients": [
{
"schema": "movies/movies.openapi.json",
"name": "movies",
"type": "openapi",
"url": "{PLT_MOVIES_URL}"
}
]

Now open routes/root.js and add the following:

  fastify.get('/movies-length', async (request, reply) => {
const movies = await request.movies.getMovies()
return { length: movies.length }
})

This code calls movies to get all the movies and returns the length of the array.

Finally, start the service:

npx platformatic service start

Platformatic Composer

Create at the same level of test-db and test-service another folder for Composer and cd into it:

mkdir test-composer
cd test-composer

Then create a composer on port 5044 in the folder using npx create-platformatic@latest:

➜ npx create-platformatic@latest

Hello marcopiraccini, welcome to Platformatic 0.32.0!
Let's start by creating a new project.
? Which kind of project do you want to create? Composer
? Where would you like to create your project? .
? What port do you want to use? 5044
? Do you want to run npm install? no
[16:05:28] INFO: Configuration file platformatic.composer.json successfully created.
[16:05:28] INFO: Environment file .env successfully created.
? Do you want to create the github action to deploy this application to Platformatic Cloud dynamic workspace? no
? Do you want to create the github action to deploy this application to Platformatic Cloud static workspace? no

All done! Please open the project directory and check the README.

Open platformatic.composer.json and change it to the following:

{
"$schema": "https://platformatic.dev/schemas/v0.32.0/composer",
"server": {
"hostname": "{PLT_SERVER_HOSTNAME}",
"port": "{PORT}",
"logger": {
"level": "{PLT_SERVER_LOGGER_LEVEL}"
}
},
"composer": {
"services": [
{
"id": "example",
"origin": "http://127.0.0.1:5043",
"openapi": {
"url": "/documentation/json"
}
}
],
"refreshTimeout": 3000
},
"telemetry": {
"serviceName": "test-composer",
"exporter": {
"type": "otlp",
"options": {
"url": "http://localhost:4318/v1/traces"
}
}
},
"watch": true
}

Note that we just added test-service as origin of the proxied service and added the usual telemetry configuration, with a different serviceName.

Finally, start the composer:

npx platformatic composer start

Run the Test

Check that the composer is exposing movies-length opening: http://127.0.0.1:5044/documentation/

You should see: +image

To add some data, we can POST directly to the DB service (port 5042):

curl -X POST -H "Content-Type: application/json" -d '{"title":"The Matrix"}' http://127.0.0.1:5042/movies 
curl -X POST -H "Content-Type: application/json" -d '{"title":"The Matrix Reloaded"}' http://127.0.0.1:5042/movies

Now, let's check that the composer (port 5044) is working:

curl http://127.0.0.1:5044/movies-length

If the composer is working correctly, you should see:

{"length":2}

However, the main interest of this example is to show how to use the Platformatic Telemetry, so let's check it. +Open the Jaeger UI at http://localhost:16686/ and you should see something like this:

image

Select on the left the test-composer service and the GET /movies-length operation, click on "Find traces" and you should see something like this:

image

You can then click on the trace and see the details:

image

Note that every time a request is received or a client call is made, a new span is started. So we have:

  • One span for the request received by the test-composer
  • One span for the client call to test-service
  • One span for the request received by test-service
  • One span for the client call to test-db
  • One span for the request received by test-db

All these spans are linked together, so you can see the whole trace.

What if you want to use Zipkin?

Starting from this example, it's also possible to run the same test using Zipkin. To do so, you need to start the Zipkin server:

docker run -d -p 9411:9411 openzipkin/zipkin

Then, you need to change the telemetry configuration in all the platformatic.*.json to the following (only the exporter object is different)

  "telemetry": {
(...)
"exporter": {
"type": "zipkin",
"options": {
"url": "http://127.0.0.1:9411/api/v2/spans"
}
}
}

The Zipkin UI is available at http://localhost:9411/

+ + + + \ No newline at end of file diff --git a/docs/next/category/getting-started/index.html b/docs/next/category/getting-started/index.html new file mode 100644 index 00000000000..19d8aeaaed9 --- /dev/null +++ b/docs/next/category/getting-started/index.html @@ -0,0 +1,17 @@ + + + + + +Getting Started | Platformatic Open Source Software + + + + + + + + + + \ No newline at end of file diff --git a/docs/next/category/guides/index.html b/docs/next/category/guides/index.html new file mode 100644 index 00000000000..2e56a125156 --- /dev/null +++ b/docs/next/category/guides/index.html @@ -0,0 +1,17 @@ + + + + + +Guides | Platformatic Open Source Software + + + + + +
+
Version: Next

Guides

+ + + + \ No newline at end of file diff --git a/docs/next/category/packages/index.html b/docs/next/category/packages/index.html new file mode 100644 index 00000000000..71ab3bb8682 --- /dev/null +++ b/docs/next/category/packages/index.html @@ -0,0 +1,17 @@ + + + + + +Packages | Platformatic Open Source Software + + + + + + + + + + \ No newline at end of file diff --git a/docs/next/category/platformatic-cloud/index.html b/docs/next/category/platformatic-cloud/index.html new file mode 100644 index 00000000000..628680e2ad5 --- /dev/null +++ b/docs/next/category/platformatic-cloud/index.html @@ -0,0 +1,17 @@ + + + + + +Platformatic Cloud | Platformatic Open Source Software + + + + + + + + + + \ No newline at end of file diff --git a/docs/next/category/reference/index.html b/docs/next/category/reference/index.html new file mode 100644 index 00000000000..6711b15d65a --- /dev/null +++ b/docs/next/category/reference/index.html @@ -0,0 +1,17 @@ + + + + + +Reference | Platformatic Open Source Software + + + + + + + + + + \ No newline at end of file diff --git a/docs/next/contributing/documentation-style-guide/index.html b/docs/next/contributing/documentation-style-guide/index.html new file mode 100644 index 00000000000..1a148c86439 --- /dev/null +++ b/docs/next/contributing/documentation-style-guide/index.html @@ -0,0 +1,74 @@ + + + + + +Documentation Style Guide | Platformatic Open Source Software + + + + + +
+
Version: Next

Documentation Style Guide

Welcome to the Platformatic Documentation Style Guide. This guide is here to provide +you with a conventional writing style for users writing developer documentation on +our Open Source framework. Each topic is precise and well explained to help you write +documentation users can easily understand and implement.

Who is This Guide For?

This guide is for anyone who loves to build with Platformatic or wants to contribute +to our documentation. You do not need to be an expert in writing technical +documentation. This guide is here to help you.

Visit CONTRIBUTING.md +file on GitHub to join our Open Source folks.

Before you Write

You should have a basic understanding of:

  • JavaScript
  • Node.js
  • Git
  • GitHub
  • Markdown
  • HTTP
  • NPM

Consider Your Audience

Before you start writing, think about your audience. In this case, your audience +should already know HTTP, JavaScript, NPM, and Node.js. It is necessary to keep +your readers in mind because they are the ones consuming your content. You want +to give as much useful information as possible. Consider the vital things they +need to know and how they can understand them. Use words and references that +readers can relate to easily. Ask for feedback from the community, it can help +you write better documentation that focuses on the user and what you want to +achieve.

Get Straight to the Point

Give your readers a clear and precise action to take. Start with what is most +important. This way, you can help them find what they need faster. Mostly, +readers tend to read the first content on a page, and many will not scroll +further.

Example

Less like this:

Colons are very important to register a parametric path. It lets +the framework know there is a new parameter created. You can place the colon +before the parameter name so the parametric path can be created.

More Like this:

To register a parametric path, put a colon before the parameter +name. Using a colon lets the framework know it is a parametric path and not a +static path.

Images and Video Should Enhance the Written Documentation

Images and video should only be added if they complement the written +documentation, for example to help the reader form a clearer mental model of a +concept or pattern.

Images can be directly embedded, but videos should be included by linking to an +external site, such as YouTube. You can add links by using +[Title](https://www.websitename.com) in the Markdown.

Avoid Plagiarism

Make sure you avoid copying other people's work. Keep it as original as +possible. You can learn from what they have done and reference where it is from +if you used a particular quote from their work.

Word Choice

There are a few things you need to use and avoid when writing your documentation +to improve readability for readers and make documentation neat, direct, and +clean.

When to use the Second Person "you" as the Pronoun

When writing articles or guides, your content should communicate directly to +readers in the second person ("you") addressed form. It is easier to give them +direct instruction on what to do on a particular topic. To see an example, visit +the Quick Start Guide.

Example

Less like this:

We can use the following plugins.

More like this:

You can use the following plugins.

According to Wikipedia, You is usually a second person pronoun. +Also, used to refer to an indeterminate person, as a more common alternative +to a very formal indefinite pronoun.

To recap, use "you" when writing articles or guides.

When to Avoid the Second Person "you" as the Pronoun

One of the main rules of formal writing such as reference documentation, or API +documentation, is to avoid the second person ("you") or directly addressing the +reader.

Example

Less like this:

You can use the following recommendation as an example.

More like this:

As an example, the following recommendations should be +referenced.

To view a live example, refer to the Decorators +reference document.

To recap, avoid "you" in reference documentation or API documentation.

Avoid Using Contractions

Contractions are the shortened version of written and spoken forms of a word, +i.e. using "don't" instead of "do not". Avoid contractions to provide a more +formal tone.

Avoid Using Condescending Terms

Condescending terms are words that include:

  • Just
  • Easy
  • Simply
  • Basically
  • Obviously

The reader may not find it easy to use Platformatic; avoid +words that make it sound simple, easy, offensive, or insensitive. Not everyone +who reads the documentation has the same level of understanding.

Starting With a Verb

Mostly start your description with a verb, which makes it simple and precise for +the reader to follow. Prefer using present tense because it is easier to read +and understand than the past or future tense.

Example

Less like this:

There is a need for Node.js to be installed before you can be +able to use Platformatic.

More like this:

Install Node.js to make use of Platformatic.

Grammatical Moods

Grammatical moods are a great way to express your writing. Avoid sounding too +bossy while making a direct statement. Know when to switch between indicative, +imperative, and subjunctive moods.

Indicative - Use when making a factual statement or question.

Example

Since there is no testing framework available, "Platformatic recommends ways +to write tests".

Imperative - Use when giving instructions, actions, commands, or when you +write your headings.

Example

Install dependencies before starting development.

Subjunctive - Use when making suggestions, hypotheses, or non-factual +statements.

Example

Reading the documentation on our website is recommended to get +comprehensive knowledge of the framework.

Use Active Voice Instead of Passive

Using active voice is a more compact and direct way of conveying your +documentation.

Example

Passive:

The node dependencies and packages are installed by npm.

Active:

npm installs packages and node dependencies.

Writing Style

Documentation Titles

When creating a new guide, API, or reference in the /docs/ directory, use +short titles that best describe the topic of your documentation. Name your files +in kebab-case and avoid Raw or camelCase. To learn more about kebab-case you +can visit this medium article on Case +Styles.

Examples:

hook-and-plugins.md

adding-test-plugins.md

removing-requests.md

Hyperlinks should have a clear title describing what they reference. Here is how your +hyperlink should look:

<!-- More like this -->

// Add clear & brief description
[Fastify Plugins] (https://www.fastify.io/docs/latest/Plugins/)

<!--Less like this -->

// incomplete description
[Fastify] (https://www.fastify.io/docs/latest/Plugins/)

// Adding title in link brackets
[](https://www.fastify.io/docs/latest/Plugins/ "fastify plugin")

// Empty title
[](https://www.fastify.io/docs/latest/Plugins/)

// Adding links localhost URLs instead of using code strings (``)
[http://localhost:3000/](http://localhost:3000/)

Include in your documentation as many essential references as possible, but +avoid having numerous links when writing to avoid distractions.

+ + + + \ No newline at end of file diff --git a/docs/next/contributing/index.html b/docs/next/contributing/index.html new file mode 100644 index 00000000000..44da79c1bb1 --- /dev/null +++ b/docs/next/contributing/index.html @@ -0,0 +1,18 @@ + + + + + +Contributing | Platformatic Open Source Software + + + + + +
+
+ + + + \ No newline at end of file diff --git a/docs/next/getting-started/architecture/index.html b/docs/next/getting-started/architecture/index.html new file mode 100644 index 00000000000..1e1604dacf7 --- /dev/null +++ b/docs/next/getting-started/architecture/index.html @@ -0,0 +1,25 @@ + + + + + +Architecture | Platformatic Open Source Software + + + + + +
+
Version: Next

Architecture

Platformatic is a collection of Open Source tools designed to eliminate friction +in backend development. The first of those tools is Platformatic DB, which is developed +as @platformatic/db.

Platformatic DB

Platformatic DB can expose a SQL database by dynamically mapping it to REST/OpenAPI +and GraphQL endpoints. It supports a limited subset of the SQL query language, but +also allows developers to add their own custom routes and resolvers.

Platformatic DB Architecture

Platformatic DB is composed of a few key libraries:

  1. @platformatic/sql-mapper - follows the Data Mapper pattern to build an API on top of a SQL database. +Internally it uses the @databases project.
  2. @platformatic/sql-openapi - uses sql-mapper to create a series of REST routes and matching OpenAPI definitions. +Internally it uses @fastify/swagger.
  3. @platformatic/sql-graphql - uses sql-mapper to create a GraphQL endpoint and schema. sql-graphql also supports Federation. +Internally it uses mercurius.

Platformatic DB allows you to load a Fastify plugin during server startup that contains your own application-specific code. +The plugin can add more routes or resolvers — these will automatically be shown in the OpenAPI and GraphQL schemas.

SQL database migrations are also supported. They're implemented internally with the postgrator library.

+ + + + \ No newline at end of file diff --git a/docs/next/getting-started/movie-quotes-app-tutorial/index.html b/docs/next/getting-started/movie-quotes-app-tutorial/index.html new file mode 100644 index 00000000000..e75f9f58743 --- /dev/null +++ b/docs/next/getting-started/movie-quotes-app-tutorial/index.html @@ -0,0 +1,129 @@ + + + + + +Movie Quotes App Tutorial | Platformatic Open Source Software + + + + + +
+
Version: Next

Movie Quotes App Tutorial

This tutorial will help you learn how to build a full stack application on top +of Platformatic DB. We're going to build an application that allows us to +save our favourite movie quotes. We'll also be building in custom API functionality +that allows for some neat user interaction on our frontend.

You can find the complete code for the application that we're going to build +on GitHub.

note

We'll be building the frontend of our application with the Astro +framework, but the GraphQL API integration steps that we're going to cover can +be applied with most frontend frameworks.

What we're going to cover

In this tutorial we'll learn how to:

  • Create a Platformatic API
  • Apply database migrations
  • Create relationships between our API entities
  • Populate our database tables
  • Build a frontend application that integrates with our GraphQL API
  • Extend our API with custom functionality
  • Enable CORS on our Platformatic API

Prerequisites

To follow along with this tutorial you'll need to have these things installed:

You'll also need to have some experience with JavaScript, and be comfortable with +running commands in a terminal.

Build the backend

Create a Platformatic API

First, let's create our project directory:

mkdir -p tutorial-movie-quotes-app/apps/movie-quotes-api/

cd tutorial-movie-quotes-app/apps/movie-quotes-api/

Run this command in your terminal to start the Platformatic creator wizard:

npm create platformatic@latest

This interactive command-line tool will ask you some questions about how you'd +like to set up your new Platformatic project. For this guide, select these options:

- Which kind of project do you want to create?  => DB
- Where would you like to create your project? => quick-start
- Do you want to create default migrations? => Yes
- Do you want to create a plugin? => Yes
- Do you want to use TypeScript? => No
- Do you want to install dependencies? => Yes (this can take a while)
- Do you want to apply the migrations? => Yes
- Do you want to generate types? => Yes
- Do you want to create the github action to deploy this application to Platformatic Cloud dynamic workspace? => No
- Do you want to create the github action to deploy this application to Platformatic Cloud static workspace? => No

Once the wizard is complete, you'll have a Platformatic app project in the +folder quick-start, with example migration files and a plugin script.

info

Make sure you run the npm/yarn/pnpm install command manually if you +don't ask the wizard to do it for you.

Define the database schema

Let's create a new directory to store our migration files:

mkdir migrations

Then we'll create a migration file named 001.do.sql in the migrations +directory:

CREATE TABLE quotes (
id INTEGER PRIMARY KEY,
quote TEXT NOT NULL,
said_by VARCHAR(255) NOT NULL,
created_at DATETIME DEFAULT CURRENT_TIMESTAMP
);

Now let's setup migrations in our Platformatic configuration +file, platformatic.db.json:

{
"$schema": "https://platformatic.dev/schemas/v0.23.2/db",
"server": {
"hostname": "{PLT_SERVER_HOSTNAME}",
"port": "{PORT}",
"logger": {
"level": "{PLT_SERVER_LOGGER_LEVEL}"
}
},
"db": {
"connectionString": "{DATABASE_URL}",
"graphql": true,
"openapi": true
},
"plugins": {
"paths": [
"plugin.js"
]
},
"types": {
"autogenerate": true
},
"migrations": {
"dir": "migrations",
"autoApply": true
}
}
info

Take a look at the Configuration reference +to see all the supported configuration settings.

Now we can start the Platformatic DB server:

npm run start

Our Platformatic DB server should start, and we'll see messages like these:

[11:26:48.772] INFO (15235): running 001.do.sql
[11:26:48.864] INFO (15235): server listening
url: "http://127.0.0.1:3042"

Let's open a new terminal and make a request to our server's REST API that +creates a new quote:

curl --request POST --header "Content-Type: application/json" \
-d "{ \"quote\": \"Toto, I've got a feeling we're not in Kansas anymore.\", \"saidBy\": \"Dorothy Gale\" }" \
http://localhost:3042/quotes

We should receive a response like this from the API:

{"id":1,"quote":"Toto, I've got a feeling we're not in Kansas anymore.","saidBy":"Dorothy Gale","createdAt":"1684167422600"}

Create an entity relationship

Now let's create a migration file named 002.do.sql in the migrations +directory:

CREATE TABLE movies (
id INTEGER PRIMARY KEY,
name TEXT NOT NULL UNIQUE
);

ALTER TABLE quotes ADD COLUMN movie_id INTEGER REFERENCES movies(id);

This SQL will create a new movies database table and also add a movie_id +column to the quotes table. This will allow us to store movie data in the +movies table and then reference them by ID in our quotes table.

Let's stop the Platformatic DB server with Ctrl + C, and then start it again:

npm run start

The new migration should be automatically applied and we'll see the log message +running 002.do.sql.

Our Platformatic DB server also provides a GraphQL API. Let's open up the GraphiQL +application in our web browser:

http://localhost:3042/graphiql

Now let's run this query with GraphiQL to add the movie for the quote that we +added earlier:

mutation {
saveMovie(input: { name: "The Wizard of Oz" }) {
id
}
}

We should receive a response like this from the API:

{
"data": {
"saveMovie": {
"id": "1"
}
}
}

Now we can update our quote to reference the movie:

mutation {
saveQuote(input: { id: 1, movieId: 1 }) {
id
quote
saidBy
createdAt
movie {
id
name
}
}
}

We should receive a response like this from the API:

{
"data": {
"saveQuote": {
"id": "1",
"quote": "Toto, I've got a feeling we're not in Kansas anymore.",
"saidBy": "Dorothy Gale",
"movie": {
"id": "1",
"name": "The Wizard of Oz"
}
}
}
}

Our Platformatic DB server has automatically identified the relationship +between our quotes and movies database tables. This allows us to make +GraphQL queries that retrieve quotes and their associated movies at the same +time. For example, to retrieve all quotes from our database we can run:

query {
quotes {
id
quote
saidBy
createdAt
movie {
id
name
}
}
}

To view the GraphQL schema that's generated for our API by Platformatic DB, +we can run this command in our terminal:

npx platformatic db schema graphql

The GraphQL schema shows all of the queries and mutations that we can run +against our GraphQL API, as well as the types of data that it expects as input.

Populate the database

Our movie quotes database is looking a little empty! We're going to create a +"seed" script to populate it with some data.

Let's create a new file named seed.js and copy and paste in this code:

'use strict'

const quotes = [
{
quote: "Toto, I've got a feeling we're not in Kansas anymore.",
saidBy: 'Dorothy Gale',
movie: 'The Wizard of Oz'
},
{
quote: "You're gonna need a bigger boat.",
saidBy: 'Martin Brody',
movie: 'Jaws'
},
{
quote: 'May the Force be with you.',
saidBy: 'Han Solo',
movie: 'Star Wars'
},
{
quote: 'I have always depended on the kindness of strangers.',
saidBy: 'Blanche DuBois',
movie: 'A Streetcar Named Desire'
}
]

module.exports = async function ({ entities, db, sql }) {
for (const values of quotes) {
const movie = await entities.movie.save({ input: { name: values.movie } })

console.log('Created movie:', movie)

const quote = {
quote: values.quote,
saidBy: values.saidBy,
movieId: movie.id
}

await entities.quote.save({ input: quote })

console.log('Created quote:', quote)
}
}
info

Take a look at the Seed a Database guide to learn more +about how database seeding works with Platformatic DB.

Let's stop our Platformatic DB server running and remove our SQLite database:

rm db.sqlite

Now let's create a fresh SQLite database by running our migrations:

npx platformatic db migrations apply

And then let's populate the quotes and movies tables with data using our +seed script:

npx platformatic db seed seed.js

Our database is full of data, but we don't have anywhere to display it. It's +time to start building our frontend!

Build the frontend

We're now going to use Astro to build our frontend +application. If you've not used it before, you might find it helpful +to read this overview +on how Astro components are structured.

tip

Astro provide some extensions and tools to help improve your +Editor Setup when building an +Astro application.

Create an Astro application

In the root directory tutorial-movie-quotes-app of our project, let's create a new directory for our frontend +application:

mkdir -p apps/movie-quotes-frontend/

cd apps/movie-quotes-frontend/

And then we'll create a new Astro project:

npm create astro@latest -- --template basics

It will ask you some questions about how you'd like to set up +your new Astro project. For this guide, select these options:

Where should we create your new project?

   .
◼ tmpl Using basics as project template
✔ Template copied

Install dependencies? (it's buggy, we'll do it afterwards)

   No
◼ No problem! Remember to install dependencies after setup.

Do you plan to write TypeScript?

   No
◼ No worries! TypeScript is supported in Astro by default, but you are free to continue writing JavaScript instead.

Initialize a new git repository?

   No
◼ Sounds good! You can always run git init manually.

Liftoff confirmed. Explore your project!
Run npm run dev to start the dev server. CTRL+C to stop.
Add frameworks like react or tailwind using astro add.

Now we'll edit our Astro configuration file, astro.config.mjs and +copy and paste in this code:

import { defineConfig } from 'astro/config'

// https://astro.build/config
export default defineConfig({
output: 'server'
})

And we'll also edit our tsconfig.json file and add in this configuration:

{
"extends": "astro/tsconfigs/base",
"compilerOptions": {
"types": ["astro/client"]
}
}

Now we can start up the Astro development server with:

npm run dev

And then load up the frontend in our browser at http://localhost:3000

Now that everything is working, we'll remove all default *.astro files from the src/ directory, but we'll keep the directory structure. You can delete them now, or override them later.

Create a layout

In the src/layouts directory, let's create a new file named Layout.astro:

---
export interface Props {
title: string;
page?: string;
}
const { title, page } = Astro.props;
---

<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8" />
<meta name="viewport" content="width=device-width" />
<title>{title}</title>
</head>
<body>
<header>
<h1>🎬 Movie Quotes</h1>
</header>
<nav>
<a href="/">All quotes</a>
</nav>
<section>
<slot />
</section>
</body>
</html>

The code between the --- is known as the component script, and the +code after that is the component template. The component script will only run +on the server side when a web browser makes a request. The component template +is rendered server side and sent back as an HTML response to the web browser.

Now we'll update src/pages/index.astro to use this Layout component. +Let's replace the contents of src/pages/index.astro with this code:

---
import Layout from '../layouts/Layout.astro';
---

<Layout title="All quotes" page="listing">
<main>
<p>We'll list all the movie quotes here.</p>
</main>
</Layout>

Integrate the urql GraphQL client

We're now going to integrate the URQL +GraphQL client into our frontend application. This will allow us to run queries +and mutations against our Platformatic GraphQL API.

Let's first install @urql/core and +graphql as project dependencies:

npm install @urql/core graphql

Then let's create a new .env file and add this configuration:

PUBLIC_GRAPHQL_API_ENDPOINT=http://127.0.0.1:3042/graphql

Now we'll create a new directory:

mkdir src/lib

And then create a new file named src/lib/quotes-api.js. In that file we'll +create a new URQL client:

// src/lib/quotes-api.js

import { createClient, cacheExchange, fetchExchange } from '@urql/core';

const graphqlClient = createClient({
url: import.meta.env.PUBLIC_GRAPHQL_API_ENDPOINT,
requestPolicy: "network-only",
exchanges: [cacheExchange, fetchExchange]
});

We'll also add a thin wrapper around the client that does some basic error +handling for us:

// src/lib/quotes-api.js

async function graphqlClientWrapper(method, gqlQuery, queryVariables = {}) {
const queryResult = await graphqlClient[method](
gqlQuery,
queryVariables
).toPromise();

if (queryResult.error) {
console.error("GraphQL error:", queryResult.error);
}

return {
data: queryResult.data,
error: queryResult.error,
};
}

export const quotesApi = {
async query(gqlQuery, queryVariables = {}) {
return await graphqlClientWrapper("query", gqlQuery, queryVariables);
},
async mutation(gqlQuery, queryVariables = {}) {
return await graphqlClientWrapper("mutation", gqlQuery, queryVariables);
}
}

And lastly, we'll export gql from the @urql/core package, to make it +simpler for us to write GraphQL queries in our pages:

// src/lib/quotes-api.js

export { gql } from "@urql/core";

Stop the Astro dev server and then start it again so it picks up the .env +file:

npm run dev

Display all quotes

Let's display all the movie quotes in src/pages/index.astro.

First, we'll update the component script at the top and add in a query to +our GraphQL API for quotes:

---
import Layout from '../layouts/Layout.astro';
import { quotesApi, gql } from '../lib/quotes-api';

const { data } = await quotesApi.query(gql`
query {
quotes {
id
quote
saidBy
createdAt
movie {
id
name
}
}
}
`);

const quotes = data?.quotes || [];
---

Then we'll update the component template to display the quotes:

<Layout title="All quotes" page="listing">
<main>
{quotes.length > 0 ? quotes.map((quote) => (
<div>
<blockquote>
<p>{quote.quote}</p>
</blockquote>
<p>
{quote.saidBy}, {quote.movie?.name}
</p>
<div>
<span>Added {new Date(Number(quote.createdAt)).toUTCString()}</span>
</div>
</div>
)) : (
<p>No movie quotes have been added.</p>
)}
</main>
</Layout>

And just like that, we have all the movie quotes displaying on the page!

Integrate Tailwind for styling

Automatically add the @astrojs/tailwind integration:

npx astro add tailwind --yes

Add the Tailwind CSS Typography +and Forms plugins:

npm install --save-dev @tailwindcss/typography @tailwindcss/forms

Import the plugins in our Tailwind configuration file:

// tailwind.config.cjs

/** @type {import('tailwindcss').Config} */
module.exports = {
content: ['./src/**/*.{astro,html,js,jsx,md,mdx,svelte,ts,tsx,vue}'],
theme: {
extend: {}
},
plugins: [
require('@tailwindcss/forms'),
require('@tailwindcss/typography')
]
}

Stop the Astro dev server and then start it again so it picks up all the +configuration changes:

npm run dev

Style the listing page

To style our listing page, let's add CSS classes to the component template in +src/layouts/Layout.astro:

---
export interface Props {
title: string;
page?: string;
}

const { title, page } = Astro.props;

const navActiveClasses = "font-bold bg-yellow-400 no-underline";
---

<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8" />
<meta name="viewport" content="width=device-width" />
<title>{title}</title>
</head>
<body class="py-8">
<header class="prose mx-auto mb-6">
<h1>🎬 Movie Quotes</h1>
</header>
<nav class="prose mx-auto mb-6 border-y border-gray-200 flex">
<a href="/" class={`p-3 ${page === "listing" && navActiveClasses}`}>All quotes</a>
</nav>
<section class="prose mx-auto">
<slot />
</section>
</body>
</html>

Then let's add CSS classes to the component template in src/pages/index.astro:

<Layout title="All quotes">
<main>
{quotes.length > 0 ? quotes.map((quote) => (
<div class="border-b mb-6">
<blockquote class="text-2xl mb-0">
<p class="mb-4">{quote.quote}</p>
</blockquote>
<p class="text-xl mt-0 mb-8 text-gray-400">
{quote.saidBy}, {quote.movie?.name}
</p>
<div class="flex flex-col mb-6 text-gray-400">
<span class="text-gray-400 italic">Added {new Date(Number(quote.createdAt)).toUTCString()}</span>
</div>
</div>
)) : (
<p>No movie quotes have been added.</p>
)}
</main>
</Layout>

Our listing page is now looking much more user friendly!

Create an add quote page

We're going to create a form component that we can use for adding and editing +quotes.

First let's create a new component file, src/components/QuoteForm.astro:

---
export interface QuoteFormData {
id?: number;
quote?: string;
saidBy?: string;
movie?: string;
}

export interface Props {
action: string;
values?: QuoteFormData;
saveError?: boolean;
loadError?: boolean;
submitLabel: string;
}

const { action, values = {}, saveError, loadError, submitLabel } = Astro.props;
---

{saveError && <p class="text-lg bg-red-200 p-4">There was an error saving the quote. Please try again.</p>}
{loadError && <p class="text-lg bg-red-200 p-4">There was an error loading the quote. Please try again.</p>}

<form method="post" action={action} class="grid grid-cols-1 gap-6">
<label for="quote" class="block">
<span>Quote</span>
<textarea id="quote" name="quote" required="required" class="mt-1 w-full">{values.quote}</textarea>
</label>
<label for="said-by" class="block">
<span>Said by</span>
<input type="text" id="said-by" name="saidBy" required="required" value={values.saidBy} class="mt-1 w-full">
</label>
<label for="movie" class="block">
<span>Movie</span>
<input type="text" id="movie" name="movie" required="required" autocomplete="off" value={values.movie} class="form-input mt-1 w-full">
</label>
<input type="submit" value={submitLabel} disabled={loadError && "disabled"} class="bg-yellow-400 hover:bg-yellow-500 text-gray-900 round p-3" />
</form>

Create a new page file, src/pages/add.astro:

---
import Layout from '../layouts/Layout.astro';
import QuoteForm from '../components/QuoteForm.astro';
import type { QuoteFormData } from '../components/QuoteForm.astro';

let formData: QuoteFormData = {};
let saveError = false;
---

<Layout title="Add a movie quote" page="add">
<main>
<h2>Add a quote</h2>
<QuoteForm action="/add" values={formData} saveError={saveError} submitLabel="Add quote" />
</main>
</Layout>

And now let's add a link to this page in the layout navigation in src/layouts/Layout.astro:

<nav class="prose mx-auto mb-6 border-y border-gray-200 flex">
<a href="/" class={`p-3 ${page === "listing" && navActiveClasses}`}>All quotes</a>
<a href="/add" class={`p-3 ${page === "add" && navActiveClasses}`}>Add a quote</a>
</nav>

Send form data to the API

When a user submits the add quote form we want to send the form data to our API so it can then save it to our database. Let's wire that up now.

First we're going to create a new file, src/lib/request-utils.js:

export function isPostRequest (request) {
return request.method === 'POST'
}

export async function getFormData (request) {
const formData = await request.formData()

return Object.fromEntries(formData.entries())
}

Then let's update the component script in src/pages/add.astro to use these new request utility functions:

---
import Layout from '../layouts/Layout.astro';
import QuoteForm from '../components/QuoteForm.astro';
import type { QuoteFormData } from '../components/QuoteForm.astro';

import { isPostRequest, getFormData } from '../lib/request-utils';

let formData: QuoteFormData = {};
let saveError = false;

if (isPostRequest(Astro.request)) {
formData = await getFormData(Astro.request);
}
---

When we create a new quote entity record via our API, we need to include a movieId field that references a movie entity record. This means that when a user submits the add quote form we need to:

  • Check if a movie entity record already exists with that movie name
  • Return the movie id if it does exist
  • If it doesn't exist, create a new movie entity record and return the movie ID

Let's update the import statement at the top of src/lib/quotes-api.js

-import { createClient } from '@urql/core'
+import { createClient, gql } from '@urql/core'

And then add a new method that will return a movie ID for us:

async function getMovieId (movieName) {
movieName = movieName.trim()

let movieId = null

// Check if a movie already exists with the provided name.
const queryMoviesResult = await quotesApi.query(
gql`
query ($movieName: String!) {
movies(where: { name: { eq: $movieName } }) {
id
}
}
`,
{ movieName }
)

if (queryMoviesResult.error) {
return null
}

const movieExists = queryMoviesResult.data?.movies.length === 1
if (movieExists) {
movieId = queryMoviesResult.data.movies[0].id
} else {
// Create a new movie entity record.
const saveMovieResult = await quotesApi.mutation(
gql`
mutation ($movieName: String!) {
saveMovie(input: { name: $movieName }) {
id
}
}
`,
{ movieName }
)

if (saveMovieResult.error) {
return null
}

movieId = saveMovieResult.data?.saveMovie.id
}

return movieId
}

And let's export it too:

export const quotesApi = {
async query (gqlQuery, queryVariables = {}) {
return await graphqlClientWrapper('query', gqlQuery, queryVariables)
},
async mutation (gqlQuery, queryVariables = {}) {
return await graphqlClientWrapper('mutation', gqlQuery, queryVariables)
},
getMovieId
}

Now we can wire up the last parts in the src/pages/add.astro component script:

---
import Layout from '../layouts/Layout.astro';
import QuoteForm from '../components/QuoteForm.astro';
import type { QuoteFormData } from '../components/QuoteForm.astro';

import { quotesApi, gql } from '../lib/quotes-api';
import { isPostRequest, getFormData } from '../lib/request-utils';

let formData: QuoteFormData = {};
let saveError = false;

if (isPostRequest(Astro.request)) {
formData = await getFormData(Astro.request);

const movieId = await quotesApi.getMovieId(formData.movie);

if (movieId) {
const quote = {
quote: formData.quote,
saidBy: formData.saidBy,
movieId,
};

const { error } = await quotesApi.mutation(gql`
mutation($quote: QuoteInput!) {
saveQuote(input: $quote) {
id
}
}
`, { quote });

if (!error) {
return Astro.redirect('/');
} else {
saveError = true;
}
} else {
saveError = true;
}
}

Add autosuggest for movies

We can create a better experience for our users by autosuggesting the movie name when they're adding a new quote.

Let's open up src/components/QuoteForm.astro and import our API helper methods +in the component script:

import { quotesApi, gql } from '../lib/quotes-api.js';

Then let's add in a query to our GraphQL API for all movies:

const { data } = await quotesApi.query(gql`
query {
movies {
name
}
}
`);

const movies = data?.movies || [];

Now let's update the Movie field in the component template to use the array of movies that we've retrieved from the API:

<label for="movie" class="block">
<span>Movie</span>
<input list="movies" id="movie" name="movie" required="required" autocomplete="off" value={values.movie} class="form-input mt-1 w-full">
<datalist id="movies">
{movies.map(({ name }) => (
<option>{name}</option>
))}
</datalist>
</label>

Create an edit quote page

Let's create a new directory, src/pages/edit/:

mkdir src/pages/edit/

And inside of it, let's create a new page, [id].astro:

---
import Layout from '../../layouts/Layout.astro';
import QuoteForm, { QuoteFormData } from '../../components/QuoteForm.astro';

const id = Number(Astro.params.id);

let formValues: QuoteFormData = {};
let loadError = false;
let saveError = false;
---

<Layout title="Edit movie quote">
<main>
<h2>Edit quote</h2>
<QuoteForm action={`/edit/${id}`} values={formValues} saveError={saveError} loadError={loadError} submitLabel="Update quote" />
</main>
</Layout>

You'll see that we're using the same QuoteForm component that our add quote page uses. Now we're going to wire up our edit page so that it can load an existing quote from our API and save changes back to the API when the form is submitted.

In the [id].astro component script, let's add some code to take care of these tasks:

---
import Layout from '../../layouts/Layout.astro';
import QuoteForm, { QuoteFormData } from '../../components/QuoteForm.astro';

import { quotesApi, gql } from '../../lib/quotes-api';
import { isPostRequest, getFormData } from '../../lib/request-utils';

const id = Number(Astro.params.id);

let formValues: QuoteFormData = {};
let loadError = false;
let saveError = false;

if (isPostRequest(Astro.request)) {
const formData = await getFormData(Astro.request);
formValues = formData;

const movieId = await quotesApi.getMovieId(formData.movie);

if (movieId) {
const quote = {
id,
quote: formData.quote,
saidBy: formData.saidBy,
movieId,
};

const { error } = await quotesApi.mutation(gql`
mutation($quote: QuoteInput!) {
saveQuote(input: $quote) {
id
}
}
`, { quote });

if (!error) {
return Astro.redirect('/');
} else {
saveError = true;
}
} else {
saveError = true;
}
} else {
const { data } = await quotesApi.query(gql`
query($id: ID!) {
getQuoteById(id: $id) {
id
quote
saidBy
movie {
id
name
}
}
}
`, { id });

if (data?.getQuoteById) {
formValues = {
...data.getQuoteById,
movie: data.getQuoteById.movie.name
};
} else {
loadError = true;
}
}
---

Load up http://localhost:3000/edit/1 in your browser to test out the edit quote page.

Now we're going to add edit links to the quotes listing page. Let's start by creating a new component src/components/QuoteActionEdit.astro:

---
export interface Props {
id: number;
}

const { id } = Astro.props;
---
<a href={`/edit/${id}`} class="flex items-center mr-5 text-gray-400 hover:text-yellow-600 underline decoration-yellow-600 decoration-2 underline-offset-4">
<svg class="w-6 h-6 mr-1" xmlns="http://www.w3.org/2000/svg" viewBox="0 0 24 24" fill="currentColor" class="w-6 h-6">
<path d="M21.731 2.269a2.625 2.625 0 00-3.712 0l-1.157 1.157 3.712 3.712 1.157-1.157a2.625 2.625 0 000-3.712zM19.513 8.199l-3.712-3.712-8.4 8.4a5.25 5.25 0 00-1.32 2.214l-.8 2.685a.75.75 0 00.933.933l2.685-.8a5.25 5.25 0 002.214-1.32l8.4-8.4z" />
<path d="M5.25 5.25a3 3 0 00-3 3v10.5a3 3 0 003 3h10.5a3 3 0 003-3V13.5a.75.75 0 00-1.5 0v5.25a1.5 1.5 0 01-1.5 1.5H5.25a1.5 1.5 0 01-1.5-1.5V8.25a1.5 1.5 0 011.5-1.5h5.25a.75.75 0 000-1.5H5.25z" />
</svg>
<span class="hover:underline hover:decoration-yellow-600">Edit</span>
</a>

Then let's import this component and use it in our listing page, src/pages/index.astro:

---
import Layout from '../layouts/Layout.astro';
import QuoteActionEdit from '../components/QuoteActionEdit.astro';
import { quotesApi, gql } from '../lib/quotes-api';

// ...
---

<Layout title="All quotes" page="listing">
<main>
{quotes.length > 0 ? quotes.map((quote) => (
<div class="border-b mb-6">
...
<div class="flex flex-col mb-6 text-gray-400">
<span class="flex items-center">
<QuoteActionEdit id={quote.id} />
</span>
<span class="mt-4 text-gray-400 italic">Added {new Date(Number(quote.createdAt)).toUTCString()}</span>
</div>
</div>
)) : (
<p>No movie quotes have been added.</p>
)}
</main>
</Layout>

Add delete quote functionality

Our Movie Quotes app can create, retrieve and update quotes. Now we're going to implement the D in CRUD — delete!

First let's create a new component, src/components/QuoteActionDelete.astro:

---
export interface Props {
id: number;
}

const { id } = Astro.props;
---
<form method="POST" action={`/delete/${id}`} class="form-delete-quote m-0">
<button type="submit" class="flex items-center text-gray-400 hover:text-red-700 underline decoration-red-700 decoration-2 underline-offset-4">
<svg class="w-6 h-6 mr-1" xmlns="http://www.w3.org/2000/svg" viewBox="0 0 24 24" fill="currentColor" class="w-6 h-6">
<path fill-rule="evenodd" d="M12 2.25c-5.385 0-9.75 4.365-9.75 9.75s4.365 9.75 9.75 9.75 9.75-4.365 9.75-9.75S17.385 2.25 12 2.25zm-1.72 6.97a.75.75 0 10-1.06 1.06L10.94 12l-1.72 1.72a.75.75 0 101.06 1.06L12 13.06l1.72 1.72a.75.75 0 101.06-1.06L13.06 12l1.72-1.72a.75.75 0 10-1.06-1.06L12 10.94l-1.72-1.72z" clip-rule="evenodd" />
</svg>
<span>Delete</span>
</button>
</form>

And then we'll drop it into our listing page, src/pages/index.astro:

---
import Layout from '../layouts/Layout.astro';
import QuoteActionEdit from '../components/QuoteActionEdit.astro';
import QuoteActionDelete from '../components/QuoteActionDelete.astro';
import { quotesApi, gql } from '../lib/quotes-api';

// ...
---

<Layout title="All quotes" page="listing">
<main>
{quotes.length > 0 ? quotes.map((quote) => (
<div class="border-b mb-6">
...
<div class="flex flex-col mb-6 text-gray-400">
<span class="flex items-center">
<QuoteActionEdit id={quote.id} />
<QuoteActionDelete id={quote.id} />
</span>
<span class="mt-4 text-gray-400 italic">Added {new Date(Number(quote.createdAt)).toUTCString()}</span>
</div>
</div>
...

At the moment when a delete form is submitted from our listing page, we get an Astro 404 page. Let's fix this by creating a new directory, src/pages/delete/:

mkdir src/pages/delete/

And inside of it, let's create a new page, [id].astro:

---
import Layout from '../../layouts/Layout.astro';

import { quotesApi, gql } from '../../lib/quotes-api';
import { isPostRequest } from '../../lib/request-utils';

if (isPostRequest(Astro.request)) {
const id = Number(Astro.params.id);

const { error } = await quotesApi.mutation(gql`
mutation($id: ID!) {
deleteQuotes(where: { id: { eq: $id }}) {
id
}
}
`, { id });

if (!error) {
return Astro.redirect('/');
}
}
---
<Layout title="Delete movie quote">
<main>
<h2>Delete quote</h2>
<p class="text-lg bg-red-200 p-4">There was an error deleting the quote. Please try again.</p>
</main>
</Layout>

Now if we click on a delete quote button on our listings page, it should call our GraphQL API to delete the quote. To make this a little more user friendly, let's add in a confirmation dialog so that users don't delete a quote by accident.

Let's create a new directory, src/scripts/:

mkdir src/scripts/

And inside of that directory let's create a new file, quote-actions.js:

// src/scripts/quote-actions.js

export function confirmDeleteQuote (form) {
if (confirm('Are you sure want to delete this quote?')) {
form.submit()
}
}

Then we can pull it in as client side JavaScript on our listing page, src/pages/index.astro:

<Layout>
...
</Layout>

<script>
import { confirmDeleteQuote } from '../scripts/quote-actions.js'

addEventListener('DOMContentLoaded', () => {
document.querySelectorAll('.form-delete-quote').forEach((deleteForm) => {
deleteForm.addEventListener('submit', (event) => {
event.preventDefault()
confirmDeleteQuote(event.currentTarget)
})
})
})
</script>

Build a "like" quote feature

We've built all the basic CRUD (Create, Retrieve, Update & Delete) features into our application. Now let's build a feature so that users can interact and "like" their favourite movie quotes.

To build this feature we're going to add custom functionality to our API and then add a new component, along with some client side JavaScript, to our frontend.

Create an API migration

We're now going to work on the code for our API, under the apps/movie-quotes-api directory.

First let's create a migration that adds a likes column to our quotes database table. We'll create a new migration file, migrations/003.do.sql:

ALTER TABLE quotes ADD COLUMN likes INTEGER default 0;

This migration will automatically be applied when we next start our Platformatic API.

Create an API plugin

To add custom functionality to our Platformatic API, we need to create a Fastify plugin and update our API configuration to use it.

Let's create a new file, plugin.js, and inside it we'll add the skeleton structure for our plugin:

// plugin.js

'use strict'

module.exports = async function plugin (app) {
app.log.info('plugin loaded')
}

Now let's register our plugin in our API configuration file, platformatic.db.json:

{
...
"migrations": {
"dir": "./migrations"
},
"plugins": {
"paths": ["./plugin.js"]
}
}

And then we'll start up our Platformatic API:

npm run dev

We should see log messages that tell us that our new migration has been applied and our plugin has been loaded:

[10:09:20.052] INFO (146270): running 003.do.sql
[10:09:20.129] INFO (146270): plugin loaded
[10:09:20.209] INFO (146270): server listening
url: "http://127.0.0.1:3042"

Now it's time to start adding some custom functionality inside our plugin.

Add a REST API route

We're going to add a REST route to our API that increments the count of likes for a specific quote: /quotes/:id/like

First let's add fluent-json-schema as a dependency for our API:

npm install fluent-json-schema

We'll use fluent-json-schema to help us generate a JSON Schema. We can then use this schema to validate the request path parameters for our route (id).

tip

You can use fastify-type-provider-typebox or typebox if you want to convert your JSON Schema into a TypeScript type. See this GitHub thread for a better overview, and look at the example below to see it in practice.

Here you can see in practice how to leverage typebox combined with fastify-type-provider-typebox:

import { FastifyInstance } from "fastify";
import { Static, Type } from "@sinclair/typebox";
import { TypeBoxTypeProvider } from "@fastify/type-provider-typebox";

/**
* Creation of the JSON schema needed to validate the params passed to the route
*/
const schemaParams = Type.Object({
num1: Type.Number(),
num2: Type.Number(),
});

/**
* We convert the JSON schema to the TypeScript type, in this case:
* {
num1: number;
num2: number;
}
*/
type Params = Static<typeof schemaParams>;

/**
* Here we can pass the type previously created to our synchronous unit function
*/
const multiplication = ({ num1, num2 }: Params) => num1 * num2;

export default async function (app: FastifyInstance) {
app.withTypeProvider<TypeBoxTypeProvider>().get(
"/multiplication/:num1/:num2",
{ schema: { params: schemaParams } },
/**
* Since we leverage `withTypeProvider<TypeBoxTypeProvider>()`,
* we no longer need to explicitly define the `params`.
* They will be automatically inferred as:
* {
num1: number;
num2: number;
}
*/
({ params }) => multiplication(params)
);
}

Now let's add our REST API route in plugin.js:

'use strict'

const S = require('fluent-json-schema')

module.exports = async function plugin (app) {
app.log.info('plugin loaded')

// This JSON Schema will validate the request path parameters.
// It reuses part of the schema that Platformatic DB has
// automatically generated for our Quote entity.
const schema = {
params: S.object().prop('id', app.getSchema('Quote').properties.id)
}

app.post('/quotes/:id/like', { schema }, async function (request, response) {
return {}
})
}

We can now make a POST request to our new API route:

curl --request POST http://localhost:3042/quotes/1/like
info

Learn more about how validation works in the Fastify validation documentation.

Our API route is currently returning an empty object ({}). Let's wire things up so that it increments the number of likes for the quote with the specified ID. To do this we'll add a new function inside of our plugin:

module.exports = async function plugin (app) {
app.log.info('plugin loaded')

async function incrementQuoteLikes (id) {
const { db, sql } = app.platformatic

const result = await db.query(sql`
UPDATE quotes SET likes = likes + 1 WHERE id=${id} RETURNING likes
`)

return result[0]?.likes
}

// ...
}

And then we'll call that function in our route handler function:

app.post('/quotes/:id/like', { schema }, async function (request, response) {
return { likes: await incrementQuoteLikes(request.params.id) }
})

Now when we make a POST request to our API route:

curl --request POST http://localhost:3042/quotes/1/like

We should see that the likes value for the quote is incremented every time we make a request to the route.

{"likes":1}

Add a GraphQL API mutation

We can add a likeQuote mutation to our GraphQL API by reusing the incrementQuoteLikes function that we just created.

Let's add this code at the end of our plugin, inside plugin.js:

module.exports = async function plugin (app) {
// ...

app.graphql.extendSchema(`
extend type Mutation {
likeQuote(id: ID!): Int
}
`)

app.graphql.defineResolvers({
Mutation: {
likeQuote: async (_, { id }) => await incrementQuoteLikes(id)
}
})
}

The code we've just added extends our API's GraphQL schema and defines a corresponding resolver for the likeQuote mutation.

We can now load up GraphiQL in our web browser and try out our new likeQuote mutation with this GraphQL query:

mutation {
likeQuote(id: 1)
}
info

Learn more about how to extend the GraphQL schema and define resolvers in the Mercurius API documentation.

Enable CORS on the API

When we build "like" functionality into our frontend, we'll be making a client side HTTP request to our GraphQL API. Our backend API and our frontend are running on different origins, so we need to configure our API to allow requests from the frontend. This is known as Cross-Origin Resource Sharing (CORS).

To enable CORS on our API, let's open up our API's .env file and add in a new setting:

PLT_SERVER_CORS_ORIGIN=http://localhost:3000

The value of PLT_SERVER_CORS_ORIGIN is our frontend application's origin.

Now we can add a cors configuration object in our API's configuration file, platformatic.db.json:

{
"server": {
"logger": {
"level": "{PLT_SERVER_LOGGER_LEVEL}"
},
"hostname": "{PLT_SERVER_HOSTNAME}",
"port": "{PORT}",
"cors": {
"origin": "{PLT_SERVER_CORS_ORIGIN}"
}
},
...
}

The HTTP responses from all endpoints on our API will now include the header:

access-control-allow-origin: http://localhost:3000

This will allow JavaScript running on web pages under the http://localhost:3000 origin to make requests to our API.

Add like quote functionality

Now that our API supports "liking" a quote, let's integrate it as a feature in our frontend.

First we'll create a new component, src/components/QuoteActionLike.astro:

---
export interface Props {
id: number;
likes: number;
}

const { id, likes } = Astro.props;
---
<span data-quote-id={id} class="like-quote cursor-pointer mr-5 flex items-center">
<svg class="like-icon w-6 h-6 mr-2 text-red-600" xmlns="http://www.w3.org/2000/svg" fill="none" viewBox="0 0 24 24" stroke-width="1.5" stroke="currentColor" class="w-6 h-6">
<path stroke-linecap="round" stroke-linejoin="round" d="M21 8.25c0-2.485-2.099-4.5-4.688-4.5-1.935 0-3.597 1.126-4.312 2.733-.715-1.607-2.377-2.733-4.313-2.733C5.1 3.75 3 5.765 3 8.25c0 7.22 9 12 9 12s9-4.78 9-12z" />
</svg>
<span class="likes-count w-8">{likes}</span>
</span>

<style>
.like-quote:hover .like-icon,
.like-quote.liked .like-icon {
fill: currentColor;
}
</style>

And in our listing page, src/pages/index.astro, let's import our new component and add it into the interface:

---
import Layout from '../layouts/Layout.astro';
import QuoteActionEdit from '../components/QuoteActionEdit.astro';
import QuoteActionDelete from '../components/QuoteActionDelete.astro';
import QuoteActionLike from '../components/QuoteActionLike.astro';
import { quotesApi, gql } from '../lib/quotes-api';

// ...
---

<Layout title="All quotes" page="listing">
<main>
{quotes.length > 0 ? quotes.map((quote) => (
<div class="border-b mb-6">
...
<div class="flex flex-col mb-6 text-gray-400">
<span class="flex items-center">
<QuoteActionLike id={quote.id} likes={quote.likes} />
<QuoteActionEdit id={quote.id} />
<QuoteActionDelete id={quote.id} />
</span>
<span class="mt-4 text-gray-400 italic">Added {new Date(Number(quote.createdAt)).toUTCString()}</span>
</div>
</div>
...

Then let's update the GraphQL query in this component's script to retrieve the likes field for all quotes:

const { data } = await quotesApi.query(gql`
query {
quotes {
id
quote
saidBy
likes
createdAt
movie {
id
name
}
}
}
`);

Now we have the likes showing for each quote, let's wire things up so that clicking on the like component for a quote will call our API and add a like.

Let's open up src/scripts/quote-actions.js and add a new function that makes a request to our GraphQL API:

import { quotesApi, gql } from '../lib/quotes-api.js'

export function confirmDeleteQuote (form) {
if (confirm('Are you sure want to delete this quote?')) {
form.submit()
}
}

export async function likeQuote (likeQuote) {
likeQuote.classList.add('liked')
likeQuote.classList.remove('cursor-pointer')

const id = Number(likeQuote.dataset.quoteId)

const { data } = await quotesApi.mutation(gql`
mutation($id: ID!) {
likeQuote(id: $id)
}
`, { id })

if (data?.likeQuote) {
likeQuote.querySelector('.likes-count').innerText = data.likeQuote
}
}

And then let's attach the likeQuote function to the click event for each like quote component on our listing page. We can do this by adding a little extra code inside the <script> block in src/pages/index.astro:

<script>
import { confirmDeleteQuote, likeQuote } from '../scripts/quote-actions.js'

addEventListener('DOMContentLoaded', () => {
document.querySelectorAll('.form-delete-quote').forEach((deleteForm) => {
deleteForm.addEventListener('submit', (event) => {
event.preventDefault()
confirmDeleteQuote(event.currentTarget)
})
})

document.querySelectorAll('.like-quote').forEach((container) => {
container.addEventListener('click', (event) => likeQuote(event.currentTarget), { once: true })
})
})
</script>

Sort the listing by top quotes

Now that users can like their favourite quotes, as a final step, we'll allow for sorting quotes on the listing page by the number of likes they have.

Let's update src/pages/index.astro to read a sort query string parameter and use it in the GraphQL query that we make to our API:

---
// ...

const allowedSortFields = ["createdAt", "likes"];
const searchParamSort = new URL(Astro.request.url).searchParams.get("sort");
const sort = allowedSortFields.includes(searchParamSort) ? searchParamSort : "createdAt";

const { data } = await quotesApi.query(gql`
query {
quotes(orderBy: {field: ${sort}, direction: DESC}) {
id
quote
saidBy
likes
createdAt
movie {
id
name
}
}
}
`);

const quotes = data?.quotes || [];
---
<Layout title="All quotes" page={`listing-${sort}`}>
...

Then let's replace the 'All quotes' link in the <nav> in src/layouts/Layout.astro +with two new links:

<nav class="prose mx-auto mb-6 border-y border-gray-200 flex">
<a href="/?sort=createdAt" class={`p-3 ${page === "listing-createdAt" && navActiveClasses}`}>Latest quotes</a>
<a href="/?sort=likes" class={`p-3 ${page === "listing-likes" && navActiveClasses}`}>Top quotes</a>
<a href="/add" class={`p-3 ${page === "add" && navActiveClasses}`}>Add a quote</a>
</nav>

With these few extra lines of code, our users can now sort quotes by when they +were created or by the number of likes that they have. Neat!

Wrapping up

And we're done — you now have the knowledge you need to build a full stack +application on top of Platformatic DB.

We can't wait to see what you'll build next!

+ + + + \ No newline at end of file diff --git a/docs/next/getting-started/new-api-project-instructions/index.html b/docs/next/getting-started/new-api-project-instructions/index.html new file mode 100644 index 00000000000..d4079b33f3c --- /dev/null +++ b/docs/next/getting-started/new-api-project-instructions/index.html @@ -0,0 +1,20 @@ + + + + + +new-api-project-instructions | Platformatic Open Source Software + + + + + +
+
Version: Next

new-api-project-instructions

Run this command in your terminal to start the Platformatic creator wizard:

npm create platformatic@latest

This interactive command-line tool will ask you some questions about how you'd +like to set up your new Platformatic project. For this guide, select these options:

- Which kind of project do you want to create?  => DB
- Where would you like to create your project? => quick-start
- Do you want to create default migrations? => Yes
- Do you want to create a plugin? => Yes
- Do you want to use TypeScript? => No
- Do you want to install dependencies? => Yes (this can take a while)
- Do you want to apply the migrations? => Yes
- Do you want to generate types? => Yes
- Do you want to create the github action to deploy this application to Platformatic Cloud dynamic workspace? => No
- Do you want to create the github action to deploy this application to Platformatic Cloud static workspace? => No

Once the wizard is complete, you'll have a Platformatic app project in the +folder quick-start, with example migration files and a plugin script.

info

Make sure you run the npm/yarn/pnpm install command manually if you +don't ask the wizard to do it for you.

+ + + + \ No newline at end of file diff --git a/docs/next/getting-started/quick-start-guide/index.html b/docs/next/getting-started/quick-start-guide/index.html new file mode 100644 index 00000000000..b481fd36e1a --- /dev/null +++ b/docs/next/getting-started/quick-start-guide/index.html @@ -0,0 +1,38 @@ + + + + + +Quick Start Guide | Platformatic Open Source Software + + + + + +
+
Version: Next

Quick Start Guide

In this guide you'll learn how to create and run your first API with +Platformatic DB. Let's get started!

info

This guide uses SQLite for the database, but +Platformatic DB also supports PostgreSQL, +MySQL and MariaDB databases.

Prerequisites

Platformatic supports macOS, Linux and Windows (WSL recommended).

To follow along with this guide you'll need to have these things installed:

Create a new API project

Automatic CLI

Run this command in your terminal to start the Platformatic creator wizard:

npm create platformatic@latest

This interactive command-line tool will ask you some questions about how you'd +like to set up your new Platformatic project. For this guide, select these options:

- Which kind of project do you want to create?  => DB
- Where would you like to create your project? => quick-start
- Do you want to create default migrations? => Yes
- Do you want to create a plugin? => Yes
- Do you want to use TypeScript? => No
- Do you want to install dependencies? => Yes (this can take a while)
- Do you want to apply the migrations? => Yes
- Do you want to generate types? => Yes
- Do you want to create the github action to deploy this application to Platformatic Cloud dynamic workspace? => No
- Do you want to create the github action to deploy this application to Platformatic Cloud static workspace? => No

Once the wizard is complete, you'll have a Platformatic app project in the +folder quick-start, with example migration files and a plugin script.

info

Make sure you run the npm/yarn/pnpm install command manually if you +don't ask the wizard to do it for you.

Start your API server

In your project directory, run this command to start your API server:

npm start

Your Platformatic API is now up and running! 🌟

This command will:

  • Automatically map your SQL database to REST and GraphQL API interfaces.
  • Start the Platformatic API server.

You can jump down to Next steps or read on to learn more about +the project files that the wizard has created for you.

Check the database schema

In your project directory (quick-start), open the migrations directory, which stores your database migration files. It contains both the 001.do.sql and 001.undo.sql files. The 001.do.sql file contains the SQL statements to create the database objects, while the 001.undo.sql file contains the SQL statements to drop them.

migrations/001.do.sql
CREATE TABLE IF NOT EXISTS movies (
id INTEGER PRIMARY KEY,
title TEXT NOT NULL
);

Note that this migration has already been applied by the Platformatic creator.

Check your API configuration

In your project directory, check the Platformatic configuration file named +platformatic.db.json and the environment file named .env:

The created configuration tells Platformatic to:

  • Run an API server on http://127.0.0.1:3042/
  • Connect to an SQLite database stored in a file named db.sqlite
  • Look for database migration files in the migrations directory
  • Load the plugin file named plugin.js and automatically generate types
tip

The Configuration reference explains all of the +supported configuration options.

Manual setup

Create a directory for your new API project:

mkdir quick-start

cd quick-start

Then create a package.json file and install the platformatic +CLI as a project dependency:

npm init --yes

npm install platformatic

Add a database schema

In your project directory (quick-start), create a file for your sqlite3 database and also, a migrations directory to +store your database migration files:

touch db.sqlite

mkdir migrations

Then create a new migration file named 001.do.sql in the migrations +directory.

Copy and paste this SQL query into the migration file:

migrations/001.do.sql
CREATE TABLE movies (
id INTEGER PRIMARY KEY,
title VARCHAR(255) NOT NULL
);

When it's run by Platformatic, this query will create a new database table +named movies.

tip

You can check syntax for SQL queries on the Database.Guide SQL Reference.

Configure your API

In your project directory, create a new Platformatic configuration file named +platformatic.db.json.

Copy and paste in this configuration:

platformatic.db.json
{
"server": {
"hostname": "127.0.0.1",
"port": "3042"
},
"db": {
"connectionString": "sqlite://./db.sqlite"
},
"migrations": {
"dir": "./migrations",
"autoApply": "true"
}
}

This configuration tells Platformatic to:

  • Run an API server on http://127.0.0.1:3042/
  • Connect to an SQLite database stored in a file named db.sqlite
  • Look for, and apply the database migrations specified in the migrations directory
tip

The Configuration reference explains all of the +supported configuration options.

Start your API server

In your project directory, use the Platformatic CLI to start your API server:

npx platformatic db start

This will:

  • Automatically map your SQL database to REST and GraphQL API interfaces.
  • Start the Platformatic API server.

Your Platformatic API is now up and running! 🌟

Next steps

Use the REST API interface

You can use cURL to make requests to the REST interface of your API, for example:

Create a new movie

curl -X POST -H "Content-Type: application/json" \
-d "{ \"title\": \"Hello Platformatic DB\" }" \
http://localhost:3042/movies

You should receive a response from your API like this:

{"id":1,"title":"Hello Platformatic DB"}

Get all movies

curl http://localhost:3042/movies

You should receive a response from your API like this, with an array +containing all the movies in your database:

[{"id":1,"title":"Hello Platformatic DB"}]
tip

If you would like to know more about what routes are automatically available, +take a look at the REST API reference +for an overview of the REST interface that the generated API provides.

Swagger OpenAPI documentation

You can explore the OpenAPI documentation for your REST API in the Swagger UI at +http://localhost:3042/documentation

Use the GraphQL API interface

Open http://localhost:3042/graphiql in your +web browser to explore the GraphQL interface of your API.

Try out this GraphQL query to retrieve all movies from your API:

query {
movies {
id
title
}
}
tip

Learn more about your API's GraphQL interface in the +GraphQL API reference.

+ + + + \ No newline at end of file diff --git a/docs/next/guides/add-custom-functionality/extend-graphql/index.html b/docs/next/guides/add-custom-functionality/extend-graphql/index.html new file mode 100644 index 00000000000..8acccd8cdf9 --- /dev/null +++ b/docs/next/guides/add-custom-functionality/extend-graphql/index.html @@ -0,0 +1,18 @@ + + + + + +Extend GraphQL Schema | Platformatic Open Source Software + + + + + +
+
Version: Next

Extend GraphQL Schema

Sum Function

Copy and paste this code into ./sample-plugin.js file

'use strict'
module.exports = async(app, opts) => {
app.graphql.extendSchema(`
extend type Query {
add(x: Int, y: Int): Int
}
`)
app.graphql.defineResolvers({
Query: {
add: async (_, { x, y }) => x + y
}
})
}

This will add a new GraphQL query called add which will simply add the two inputs x and y provided.

You don't need to reload the server, since it will watch this file and hot-reload itself. +Let's query the server with the following body


query{
add(x: 1, y: 2)
}

You can use curl command to run this query

$ curl --location --request POST 'http://localhost:3042/graphql' \
--header 'Content-Type: application/json' \
--data-raw '{"query":"query{\n add(x: 1, y: 2)\n}"}'

You will get this output, with the sum.

{
"data": {
"add": 3
}
}

Extend Entities API

Let's implement a getPageByTitle query

'use strict'
module.exports = async(app, opts) => {
app.graphql.extendSchema(`
extend type Query {
getPageByTitle(title: String): Page
}
`)
app.graphql.defineResolvers({
Query: {
getPageByTitle: async(_, { title }) => {
const res = await app.platformatic.entities.page.find({
where: {
title: {
eq: title
}
}
})
if (res) {
return res[0]
}
return null
}
}
})
}

Page GraphQL type is already defined by Platformatic DB on start.

We are going to run this code against this GraphQL query

query{
getPageByTitle(title: "First Page"){
id
title
}
}

You can use curl command to run this query

$ curl --location --request POST 'http://localhost:3042/graphql' \
--header 'Content-Type: application/json' \
--data-raw '{"query":"query{\n getPageByTitle(title: \"First Page\"){\n id\n title\n }\n}"}'

You will get an output similar to this

{
"data": {
"getPageByTitle": {
"id": "1",
"title": "First Page"
}
}
}
+ + + + \ No newline at end of file diff --git a/docs/next/guides/add-custom-functionality/extend-rest/index.html b/docs/next/guides/add-custom-functionality/extend-rest/index.html new file mode 100644 index 00000000000..9374330274f --- /dev/null +++ b/docs/next/guides/add-custom-functionality/extend-rest/index.html @@ -0,0 +1,17 @@ + + + + + +Extend REST API | Platformatic Open Source Software + + + + + +
+
Version: Next

Extend REST API

We will follow same examples implemented in GraphQL examples: a sum function and an API to get pages by title.

Sum Function

Copy and paste this code into ./sample-plugin.js file

'use strict'
module.exports = async(app, opts) => {
app.post('/sum', async(req, reply) => {
const { x, y } = req.body
return { sum: (x + y)}
})
}

You don't need to reload the server, since it will watch this file and hot-reload itself.

Let's make a POST /sum request to the server with the following body

{
"x": 1,
"y": 2
}

You can use curl command to run this query

$ curl --location --request POST 'http://localhost:3042/sum' \
--header 'Content-Type: application/json' \
--data-raw '{
"x": 1,
"y": 2
}'

You will get this output, with the sum.

{
"sum": 3
}

Extend Entities API

Let's implement a /page-by-title endpoint, using Entities API

'use strict'
module.exports = async(app, opts) => {
app.get('/page-by-title', async(req, reply) => {
const { title } = req.query
const res = await app.platformatic.entities.page.find({
where: {
title: {
eq: title
}
}
})
if (res) {
return res[0]
}
return null
})
}

We will make a GET /page-by-title?title=First%20Page request, and we expect a single page as output.

You can use curl command to run this query

$ curl --location --request GET 'http://localhost:3042/page-by-title?title=First Page'

You will get an output similar to this

{
"id": "1",
"title": "First Page",
"body": "This is the first sample page"
}
+ + + + \ No newline at end of file diff --git a/docs/next/guides/add-custom-functionality/introduction/index.html b/docs/next/guides/add-custom-functionality/introduction/index.html new file mode 100644 index 00000000000..087e39e9315 --- /dev/null +++ b/docs/next/guides/add-custom-functionality/introduction/index.html @@ -0,0 +1,17 @@ + + + + + +Add Custom Functionality | Platformatic Open Source Software + + + + + +
+
Version: Next

Add Custom Functionality

If you want to extend Platformatic DB features, it is possible to register a plugin, which will be in the form of a standard Fastify plugin.

The config file will specify where the plugin file is located as the example below:

{
...
"plugins": {
"paths": ["./plugin/index.js"]
}
}

The path is relative to the config file path.

Since it uses fastify-isolate under the hood, all other options of that package may be specified under the plugin property.

Once the config file is set up, you can write your plugin

module.exports = async function (app) {
app.log.info('plugin loaded')
// Extend GraphQL Schema with resolvers
app.graphql.extendSchema(`
extend type Query {
add(x: Int, y: Int): Int
}
`)
app.graphql.defineResolvers({
Query: {
add: async (_, { x, y }) => x + y
}
})

// Create a new route, see https://www.fastify.io/docs/latest/Reference/Routes/ for more info
app.post('/sum', (req, reply) => {
const {x, y} = req.body
return { result: x + y }
})

// access platformatic entities data
app.get('/all-entities', (req, reply) => {
const entities = Object.keys(app.platformatic.entities)
return { entities }
})
}

+ + + + \ No newline at end of file diff --git a/docs/next/guides/add-custom-functionality/prerequisites/index.html b/docs/next/guides/add-custom-functionality/prerequisites/index.html new file mode 100644 index 00000000000..687c7e652a5 --- /dev/null +++ b/docs/next/guides/add-custom-functionality/prerequisites/index.html @@ -0,0 +1,17 @@ + + + + + +Prerequisites | Platformatic Open Source Software + + + + + +
+
Version: Next

Prerequisites

In the following examples we assume you already

  • cloned platformatic/platformatic repo from Github
  • ran pnpm install to install all dependencies
  • have Docker and docker-compose installed and running on your machine

Config File

Create a platformatic.db.json file in the root project, it will be loaded automatically by Platformatic (no need of -c, --config flag).

{
"server": {
"hostname": "127.0.0.1",
"port": 3042,
"logger": {
"level": "info"
}
},
"db": {
"connectionString": "postgres://postgres:postgres@127.0.0.1/postgres"
},
"migrations": {
"dir": "./migrations",
"table": "versions"
},
"plugins": {
"paths": ["plugin.js"]
}
}
  • Once Platformatic DB starts, its API will be available at http://127.0.0.1:3042
  • It will connect and read the schema from a PostgreSQL DB
  • Will read migrations from ./migrations directory
  • Will load custom functionality from the ./plugin.js file.

Database and Migrations

Start the database using the sample docker-compose.yml file.

$ docker-compose up -d postgresql

For migrations create a ./migrations directory and a 001.do.sql file with following contents

CREATE TABLE pages (
id SERIAL PRIMARY KEY,
title VARCHAR(255) NOT NULL,
body TEXT NOT NULL
);
INSERT INTO pages (title, body) VALUES ('First Page', 'This is the first sample page');
INSERT INTO pages (title, body) VALUES ('Second Page', 'This is the second sample page');
INSERT INTO pages (title, body) VALUES ('Third Page', 'This is the third sample page');

Plugin

Copy and paste this boilerplate code into ./plugin.js file. We will fill this in the examples.

'use strict'

module.exports = async (app, opts) => {
// we will fill this later
}

Start the server

Run

$ platformatic db start

You will get an output similar to this

                           /////////////
///// /////
/// ///
/// ///
/// ///
&& /// /// &&
&&&&&& /// /// &&&&&&
&&&& /// /// &&&&
&&& /// /// &&&&&&&&&&&&
&&& /// /////// //// && &&&&&
&& /// /////////////// &&&
&&& /// /// &&&
&&& /// // &&
&&& /// &&
&&& /// &&&
&&&& /// &&&
&&&&&% /// &&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&
///
///
///
///
///
///

[11:19:46.562] INFO (65122): running 001.do.sql
[11:19:46.929] INFO (65122): server listening
url: "http://127.0.0.1:3042"

Now it's possible to create some examples, like extending the GraphQL Schema or extending the REST API

+ + + + \ No newline at end of file diff --git a/docs/next/guides/add-custom-functionality/raw-sql/index.html b/docs/next/guides/add-custom-functionality/raw-sql/index.html new file mode 100644 index 00000000000..ff97b1804d2 --- /dev/null +++ b/docs/next/guides/add-custom-functionality/raw-sql/index.html @@ -0,0 +1,17 @@ + + + + + +Raw SQL queries | Platformatic Open Source Software + + + + + +
+
Version: Next

Raw SQL queries

To run raw SQL queries using plugins, use the app.platformatic.db.query method and pass it a SQL query using the app.platformatic.sql method.

'use strict'
module.exports = async(app, opts) => {
app.graphql.extendSchema(`
type YearlySales {
year: Int
sales: Int
}

extend type Query {
yearlySales: [YearlySales]
}
`)
app.graphql.defineResolvers({
Query: {
yearlySales: async(_, { title }) => {
const { db, sql } = app.platformatic;
const res = await db.query(sql(`
SELECT
YEAR(created_at) AS year,
SUM(amount) AS sales
FROM
orders
GROUP BY
YEAR(created_at)
`))
return res
}
}
})
}
+ + + + \ No newline at end of file diff --git a/docs/next/guides/compiling-typescript-for-deployment/index.html b/docs/next/guides/compiling-typescript-for-deployment/index.html new file mode 100644 index 00000000000..5a94eabfe6d --- /dev/null +++ b/docs/next/guides/compiling-typescript-for-deployment/index.html @@ -0,0 +1,25 @@ + + + + + +Compiling Typescript for Deployment | Platformatic Open Source Software + + + + + +
+
Version: Next

Compiling Typescript for Deployment

Platformatic Service provides automatic TypeScript compilation during the startup +of your Node.js server. While this provides an amazing developer experience, in production it adds additional +start time and it requires more resources. In this guide, we show how to compile your TypeScript +source files before shipping to a server.

Setup

The following is supported by all Platformatic applications, as they are all based on the same plugin system. +If you have generated your application using npx create-platformatic@latest, you will have a similar section in your config file:

{
...
"plugins": {
"paths": [{
"path": "plugins",
"encapsulate": false
}, "routes"],
"typescript": "{PLT_TYPESCRIPT}"
}
}

Note that the {PLT_TYPESCRIPT} will be automatically replaced with the PLT_TYPESCRIPT environment variable, that is configured in your +.env (and .env.sample) file:

PLT_TYPESCRIPT=true

Older Platformatic applications might not have the same layout, if so you can update your settings to match (after updating your dependencies).

Compiling for deployment

Compiling for deployment is then as easy as running plt service compile in that same folder. +Remember to set PLT_TYPESCRIPT=false in your environment variables in the deployed environments.

Usage with Runtime

If you are building a Runtime-based application, you will need +to compile every service independently or use the plt runtime compile command.

Avoid shipping TypeScript sources

If you want to avoid shipping the TypeScript sources you need to configure Platformatic with the location +where your files have been built by adding an outDir option:

{
...
"plugins": {
"paths": [{
"path": "plugins",
"encapsulate": false
}, "routes"],
"typescript": {
"enabled": "{PLT_TYPESCRIPT}",
"outDir": "dist"
}
}
}

This is not necessary if you include tsconfig.json together with the compiled code.

+ + + + \ No newline at end of file diff --git a/docs/next/guides/debug-platformatic-db/index.html b/docs/next/guides/debug-platformatic-db/index.html new file mode 100644 index 00000000000..9873b8def5a --- /dev/null +++ b/docs/next/guides/debug-platformatic-db/index.html @@ -0,0 +1,17 @@ + + + + + +Debug Platformatic DB | Platformatic Open Source Software + + + + + +
+
Version: Next

Debug Platformatic DB

Error: No tables found in the database

  • Verify your database connection string is correct in your Platformatic DB configuration
    • Make sure the database name is correct
  • Ensure that you have run the migration command npx platformatic db migrations apply before starting the server. See the Platformatic DB Migrations documentation for more information on working with migrations.

Logging SQL queries

You can see all the queries that are being run against your database in your terminal by setting the logger level to trace in your platformatic.db.json config file:

platformatic.db.json
{
"server": {
"logger": {
"level": "trace"
}
}
}
+ + + + \ No newline at end of file diff --git a/docs/next/guides/deploying-on-lambda/index.html b/docs/next/guides/deploying-on-lambda/index.html new file mode 100644 index 00000000000..c796630a211 --- /dev/null +++ b/docs/next/guides/deploying-on-lambda/index.html @@ -0,0 +1,26 @@ + + + + + +Deploying on AWS Lambda | Platformatic Open Source Software + + + + + +
+
Version: Next

Deploying on AWS Lambda

It is possible to deploy Platformatic applications to AWS Lambda +by leveraging @fastify/aws-lambda.

Once you set up your Platformatic DB application, such as following +our tutorial, you can create a +server.mjs file as follows:

import awsLambdaFastify from '@fastify/aws-lambda'
import { buildServer } from '@platformatic/db'

const app = await buildServer('./platformatic.db.json')
// You can use the same approach with both Platformatic DB and
// and service
// const app = await buildServer('./platformatic.service.json')

// The following also work for Platformatic Service applications
// import { buildServer } from '@platformatic/service'
export const handler = awsLambdaFastify(app)

// Loads the Application, must be after the call to `awsLambdaFastify`
await app.ready()

This would be the entry point for your AWS Lambda function.

Avoiding cold start

Caching the DB schema

If you use Platformatic DB, you want to turn on the schemalock +configuration to cache the schema +information on disk.

Set the db.schemalock configuration to true, start the application, +and a schema.lock file should appear. Make sure to commit that file and +deploy your lambda.

Provisioned concurrency

Since AWS Lambda now enables the use of ECMAScript (ES) modules in Node.js 14 runtimes, +you could lower the cold start latency when used with Provisioned Concurrency +thanks to the top-level await functionality. (Excerpt taken from @fastify/aws-lambda)

+ + + + \ No newline at end of file diff --git a/docs/next/guides/deployment/advanced-fly-io-deployment/index.html b/docs/next/guides/deployment/advanced-fly-io-deployment/index.html new file mode 100644 index 00000000000..b790675a882 --- /dev/null +++ b/docs/next/guides/deployment/advanced-fly-io-deployment/index.html @@ -0,0 +1,22 @@ + + + + + +Advanced Fly.io Deployment | Platformatic Open Source Software + + + + + +
+
Version: Next

Advanced Fly.io Deployment

Techniques used in this guide are based on the Deploy to Fly.io with SQLite +deployment guide.

Adding sqlite for debugging

With a combination of Docker and Fly.io, you can create an easy way to debug +your SQLite application without stopping your application or exporting the data. +At the end of this guide, you will be able to run fly ssh console -C db-cli to +be dropped into your remote database.

Start by creating a script for launching the database, calling it db-cli.sh:

#!/bin/sh
set -x
# DSN will be defined in the Dockerfile
sqlite3 $DSN

Create a new Dockerfile which will act as the build and deployment image:

FROM node:18-alpine

# Setup sqlite viewer
RUN apk add sqlite
ENV DSN "/app/.platformatic/data/app.db"
COPY db-cli.sh /usr/local/bin/db-cli
RUN chmod +x /usr/local/bin/db-cli

WORKDIR /app
COPY package.json package.json
COPY package-lock.json package-lock.json

RUN npm ci --omit=dev

COPY platformatic.db.json platformatic.db.json

COPY migrations migrations
# Uncomment if your application is running a plugin
# COPY plugin.js plugin.js

EXPOSE 8080

CMD ["npm", "start"]

Add a start script to your package.json:

{
"scripts": {
"start": "platformatic db"
}
}

With Fly, it becomes straightforward to connect directly to the database by +running the following command from your local machine:

fly ssh console -C db-cli
+ + + + \ No newline at end of file diff --git a/docs/next/guides/deployment/deploy-to-fly-io-with-sqlite/index.html b/docs/next/guides/deployment/deploy-to-fly-io-with-sqlite/index.html new file mode 100644 index 00000000000..6e7ae49730e --- /dev/null +++ b/docs/next/guides/deployment/deploy-to-fly-io-with-sqlite/index.html @@ -0,0 +1,33 @@ + + + + + +Deploy to Fly.io with SQLite | Platformatic Open Source Software + + + + + +
+
Version: Next

Deploy to Fly.io with SQLite

note

To follow this how-to guide, you'll first need to install the Fly CLI and create +an account by following this official guide. +You will also need an existing Platformatic DB project, please check out our +getting started guide if needed.

Navigate to your Platformatic DB project in the terminal on your local machine. +Run fly launch and follow the prompts. When it asks if you want to deploy +now, say "no" as there are a few things that you'll need to configure first.

You can also create the fly application with one line. This will create your +application in London (lhr):

fly launch --no-deploy --generate-name --region lhr --org personal --path .

The fly CLI should have created a fly.toml file in your project +directory.

Explicit builder

The fly.toml file may be missing an explicit builder setting. To have +consistent builds, it is best to add a build section:

[build]
builder = "heroku/buildpacks:20"

Database storage

Create a volume for database storage, naming it data:

fly volumes create data

This will create storage in the same region as the application. The volume +defaults to 3GB size, use -s to change the size. For example, -s 10 is 10GB.

Add a mounts section in fly.toml:

[mounts]
source = "data"
destination = "/app/.platformatic/data"

Create a directory in your project where your SQLite database will be created:

mkdir -p .platformatic/data

touch .platformatic/data/.gitkeep

The .gitkeep file ensures that this directory will always be created when +your application is deployed.

You should also ensure that your SQLite database is ignored by Git. This helps +avoid inconsistencies when your application is deployed:

echo "*.db" >> .gitignore

The command above assumes that your SQLite database file ends with the extension +.db — if the extension is different then you must change the command to match.

Change the connection string to an environment variable and make sure that +migrations are autoApplying (for platformatic@^0.4.0) in platformatic.db.json:

{
"db": {
"connectionString": "{DATABASE_URL}"
},
"migrations": {
"dir": "./migrations",
"autoApply": true
}
}

Configure server

Make sure that your platformatic.db.json uses environment variables +for the server section:

{
"server": {
"logger": {
"level": "{PLT_SERVER_LOGGER_LEVEL}"
},
"hostname": "{PLT_SERVER_HOSTNAME}",
"port": "{PORT}"
}
}

Configure environment

Start with your local environment, create a .env file and put the following:

PORT=3042
PLT_SERVER_HOSTNAME=127.0.0.1
PLT_SERVER_LOGGER_LEVEL=debug
DATABASE_URL=sqlite://.platformatic/data/movie-quotes.db

Avoid accidental leaks by ignoring your .env file:

echo ".env" >> .gitignore

This same configuration needs to be added to fly.toml:

[env]
PORT = 8080
PLT_SERVER_HOSTNAME = "0.0.0.0"
PLT_SERVER_LOGGER_LEVEL = "info"
DATABASE_URL = "sqlite:///app/.platformatic/data/movie-quotes.db"

Deploy application

A valid package.json will be needed so if you do not have one, generate one +by running npm init.

In your package.json, make sure there is a start script to run your +application:

{
"scripts": {
"start": "platformatic db"
}
}

Before deploying, make sure a .dockerignore file is created:

cp .gitignore .dockerignore

Finally, deploy the application to Fly by running:

fly deploy
+ + + + \ No newline at end of file diff --git a/docs/next/guides/deployment/index.html b/docs/next/guides/deployment/index.html new file mode 100644 index 00000000000..a5c19427331 --- /dev/null +++ b/docs/next/guides/deployment/index.html @@ -0,0 +1,46 @@ + + + + + +Deployment | Platformatic Open Source Software + + + + + +
+
Version: Next

Deployment

Applications built with Platformatic DB can be deployed to a hosting service +in the same way as any other Node.js application. This guide covers a few +things that will help smooth the path from development to production.

Running a Platformatic DB application

Make the Platformatic CLI available

To run a Platformatic DB application, the Platformatic CLI must be available +in the production environment. The most straightforward way of achieving this +is to install it as a project dependency. +This means that when npm install (or npm ci) is run as part of your +build/deployment process, the Platformatic CLI will be installed.

Define an npm run script

A number of hosting services will automatically detect if your project's +package.json has a start npm run script. They will then execute the command +npm start to run your application in production.

You can add platformatic db start as the command for your project's start +npm run script, for example:

{
...
"scripts": {
"start": "platformatic db start",
},
}

Server configuration

info

See the Configuration reference for all +configuration settings.

Configuration with environment variables

We recommend that you use environment variable placeholders +in your Platformatic DB configuration. This will allow you to configure +different settings in your development and production environments.

In development you can set the environment variables via a .env file +that will be automatically loaded by Platformatic DB. For example:

PORT=3042
PLT_SERVER_HOSTNAME=127.0.0.1

In production your hosting provider will typically provide their own mechanism +for setting environment variables.

Configure the server port

Configure the port that the server will listen on by setting an environment +variable placeholder in your Platformatic DB configuration file:

platformatic.db.json
{
"server": {
...
"port": "{PORT}"
},
...
}

Listen on all network interfaces

Most hosting providers require that you configure your server to bind to all +available network interfaces. To do this you must set the server hostname to +0.0.0.0.

This can be handled with an environment variable placeholder in your Platformatic +DB configuration file:

platformatic.db.json
{
"server": {
...
"hostname": "{PLT_SERVER_HOSTNAME}",
},
...
}

The environment variable PLT_SERVER_HOSTNAME should then be set to 0.0.0.0 +in your hosting environment.

Security considerations

We recommend disabling the GraphiQL web UI in production. It can be disabled +with the following configuration:

platformatic.db.json
{
"db": {
...
"graphql": {
"graphiql": false
}
},
...
}

If you want to use this feature in development, replace the configuration +values with environment variable placeholders +so you can set it to true in development and false in production.

Removing the welcome page

If you want to remove the welcome page, you should register an index route.

module.exports = async function (app) {
// removing the welcome page
app.get('/', (req, reply) => {
return { hello: 'world' }
})
}

Databases

Applying migrations

If you're running a single instance of your application in production, it's best to allow Platformatic DB to automatically run migrations when the server starts. This reduces the chance of a currently running instance using a database structure it doesn't understand while the new version is still being deployed.

SQLite

When using an SQLite database, you can ensure you don’t commit it to your Git +repository by adding the SQLite database filename to your .gitignore file. +The SQLite database file will be automatically generated by Platformatic DB +when your application migrations are run in production.

+ + + + \ No newline at end of file diff --git a/docs/next/guides/dockerize-platformatic-app/index.html b/docs/next/guides/dockerize-platformatic-app/index.html new file mode 100644 index 00000000000..9e7fc607f02 --- /dev/null +++ b/docs/next/guides/dockerize-platformatic-app/index.html @@ -0,0 +1,20 @@ + + + + + +Dockerize a Platformatic App | Platformatic Open Source Software + + + + + +
+
Version: Next

Dockerize a Platformatic App

This guide explains how to create a new Platformatic DB app, which connects to a PostgreSQL database.

We will then create a docker-compose.yml file that will run both services in separate containers

Generate a Platformatic DB App

Run this command in your terminal to start the Platformatic creator wizard:

npm create platformatic@latest

This interactive command-line tool will ask you some questions about how you'd +like to set up your new Platformatic project. For this guide, select these options:

- Which kind of project do you want to create?  => DB
- Where would you like to create your project? => quick-start
- Do you want to create default migrations? => Yes
- Do you want to create a plugin? => Yes
- Do you want to use TypeScript? => No
- Do you want to install dependencies? => Yes (this can take a while)
- Do you want to apply the migrations? => Yes
- Do you want to generate types? => Yes
- Do you want to create the github action to deploy this application to Platformatic Cloud dynamic workspace? => No
- Do you want to create the github action to deploy this application to Platformatic Cloud static workspace? => No

Once the wizard is complete, you'll have a Platformatic app project in the +folder quick-start, with example migration files and a plugin script.

info

Make sure you run the npm/yarn/pnpm install command manually if you don't ask the wizard to do it for you.

Create Docker image for the Platformatic DB App

In this step you are going to create some files in the root project directory

  • .dockerignore - This file tells Docker to ignore some files when copying the directory into the image filesystem
node_modules
.env*
  • start.sh - This is our entrypoint. We will run migrations then start platformatic
#!/bin/sh

echo "Running migrations..." && \
npx platformatic db migrations apply && \
echo "Starting Platformatic App..." && \
npm start
info

Make sure you make this file executable with the command chmod +x start.sh

  • Dockerfile - This is the file Docker uses to create the image
FROM node:18-alpine
WORKDIR /usr/src/app
COPY . .
RUN npm install
COPY . .
EXPOSE 3042
CMD [ "./start.sh" ]

At this point you can build your Docker image with the command

$ docker build -t platformatic-app .

Create Docker Compose config file

docker-compose.yml is the configuration file for docker-compose which will spin up containers for both PostgreSQL and our Platformatic App

version: "3.3"
services:
postgresql:
ports:
- "5433:5432"
image: "postgres:15-alpine"
environment:
- POSTGRES_PASSWORD=postgres
platformatic:
ports:
- "3042:3042"
image: 'platformatic-app:latest'
depends_on:
- postgresql
links:
- postgresql
environment:
PLT_SERVER_HOSTNAME: ${PLT_SERVER_HOSTNAME}
PORT: ${PORT}
PLT_SERVER_LOGGER_LEVEL: ${PLT_SERVER_LOGGER_LEVEL}
DATABASE_URL: postgres://postgres:postgres@postgresql:5432/postgres

A couple of things to notice:

  • The Platformatic app is started only once the database container is up and running (depends_on).
  • The Platformatic app is linked with postgresql service. Meaning that inside its container ping postgresql will be resolved with the internal ip of the database container.
  • The environment is taken directly from the .env file created by the wizard

You can now run your containers with

$ docker-compose up # (-d if you want to send them in the background)

Everything should start smoothly, and you can access your app pointing your browser to http://0.0.0.0:3042

To stop the app you can either press CTRL-C if you are running them in the foreground, or, if you used the -d flag, run

$ docker-compose down
+ + + + \ No newline at end of file diff --git a/docs/next/guides/generate-frontend-code-to-consume-platformatic-rest-api/index.html b/docs/next/guides/generate-frontend-code-to-consume-platformatic-rest-api/index.html new file mode 100644 index 00000000000..3970b82f881 --- /dev/null +++ b/docs/next/guides/generate-frontend-code-to-consume-platformatic-rest-api/index.html @@ -0,0 +1,32 @@ + + + + + +Generate Front-end Code to Consume Platformatic REST API | Platformatic Open Source Software + + + + + +
+
Version: Next

Generate Front-end Code to Consume Platformatic REST API

By default, a Platformatic app exposes REST API that provide CRUD (Create, Read, +Update, Delete) functionality for each entity (see the +Introduction to the REST API +documentation for more information on the REST API).

Platformatic CLI allows you to auto-generate the front-end code to import into your front-end application to consume the Platformatic REST API.

This guide

  • Explains how to create a new Platformatic app.
  • Explains how to configure the new Platformatic app.
  • Explains how to create a new React or Vue.js front-end application.
  • Explains how to generate the front-end TypeScript code to consume the Platformatic app REST API.
  • Provides some React and Vue.js components (both written in TypeScript) that read, create, and update an entity.
  • Explains how to import the new component in your front-end application.

Create a new Platformatic app

Run this command in your terminal to start the Platformatic creator wizard:

npm create platformatic@latest

This interactive command-line tool will ask you some questions about how you'd +like to set up your new Platformatic project. For this guide, select these options:

- Which kind of project do you want to create?  => DB
- Where would you like to create your project? => quick-start
- Do you want to create default migrations? => Yes
- Do you want to create a plugin? => Yes
- Do you want to use TypeScript? => No
- Do you want to install dependencies? => Yes (this can take a while)
- Do you want to apply the migrations? => Yes
- Do you want to generate types? => Yes
- Do you want to create the github action to deploy this application to Platformatic Cloud dynamic workspace? => No
- Do you want to create the github action to deploy this application to Platformatic Cloud static workspace? => No

Once the wizard is complete, you'll have a Platformatic app project in the +folder quick-start, with example migration files and a plugin script.

info

Make sure you run the npm/yarn/pnpm install command manually if you don't ask the wizard to do it for you.

Configure the new Platformatic app

documentation to create a new Platformatic app. Every Platformatic app uses the "Movie" demo entity and includes +the corresponding table, migrations, and REST API to create, read, update, and delete movies.

Once the new Platformatic app is ready:

  • Set up CORS in platformatic.db.json
{
"$schema": "https://platformatic.dev/schemas/v0.24.0/db",
"server": {
"hostname": "{PLT_SERVER_HOSTNAME}",
"port": "{PORT}",
"logger": {
"level": "{PLT_SERVER_LOGGER_LEVEL}"
},
+ "cors": {
+ "origin": {
+ "regexp": "/*/"
+ }
+ }
},
...
}

You can find more details about the cors configuration here.

  • launch Platformatic through npm start. +Then, the Platformatic app should be available at the http://127.0.0.1:3042/ URL.

Create a new Front-end Application

Refer to the Scaffolding Your First Vite Project +documentation to create a new front-end application, and call it "rest-api-frontend".

info

Please note Vite is suggested only for practical reasons, but the bundler of choice does not make any difference.

If you are using npm 7+ you should run

npm create vite@latest rest-api-frontend -- --template react-ts

and then follow the Vite's instructions

Scaffolding project in /Users/noriste/Sites/temp/platformatic/rest-api-frontend...

Done. Now run:

cd rest-api-frontend
npm install
npm run dev

Once done, the front-end application is available at http://localhost:5174/.

Generate the front-end code to consume the Platformatic app REST API

Now that both the Platformatic app and the front-end app are running, go to the front-end codebase and run the Platformatic CLI

cd rest-api-frontend/src
npx platformatic frontend http://127.0.0.1:3042 ts

Refer to the Platformatic CLI frontend command +documentation to know about the available options.

The Platformatic CLI generates

  • api.d.ts: A TypeScript module that includes all the OpenAPI-related types. +Here is part of the generated code
interface GetMoviesRequest {
'limit'?: number;
'offset'?: number;
// ... etc.
}

interface GetMoviesResponseOK {
'id'?: number;
'title': string;
}


// ... etc.

export interface Api {
setBaseUrl(baseUrl: string): void;
getMovies(req: GetMoviesRequest): Promise<Array<GetMoviesResponseOK>>;
createMovie(req: CreateMovieRequest): Promise<CreateMovieResponseOK>;
// ... etc.
}
  • api.ts: A TypeScript module that includes a typed function for every single OpenAPI endpoint. +Here is part of the generated code
import type { Api } from './api-types'

let baseUrl = ''
export function setBaseUrl(newUrl: string) { baseUrl = newUrl };

export const createMovie: Api['createMovie'] = async (request) => {
const response = await fetch(`${baseUrl}/movies/`, {
method:'post',
body: JSON.stringify(request),
headers: {
'Content-Type': 'application/json'
}
})

if (!response.ok) {
throw new Error(await response.text())
}

return await response.json()
}

// etc.

You can add a --name option to the command line to provide a custom name for the generated files.

cd rest-api-frontend/src
npx platformatic frontend --name foobar http://127.0.0.1:3042 ts

will generate foobar.ts and foobar-types.d.ts

React and Vue.js components that read, create, and update an entity

You can copy/paste the following React or Vue.js components that import the code +the Platformatic CLI generated.

Create a new file src/PlatformaticPlayground.tsx and copy/paste the following code.

import { useEffect, useState } from 'react'

// getMovies, createMovie, and updateMovie are all functions automatically generated by Platformatic
// in the `api.ts` module.
import { getMovies, createMovie, updateMovie, setBaseUrl } from './api'

setBaseUrl('http://127.0.0.1:3042') // configure this according to your needs

export function PlatformaticPlayground() {
const [movies, setMovies] = useState<Awaited<ReturnType<typeof getMovies>>>([])
const [newMovie, setNewMovie] = useState<Awaited<ReturnType<typeof createMovie>>>()

async function onCreateMovie() {
const newMovie = await createMovie({ title: 'Harry Potter' })
setNewMovie(newMovie)
}

async function onUpdateMovie() {
if (!newMovie || !newMovie.id) return

const updatedMovie = await updateMovie({ id: newMovie.id, title: 'The Lord of the Rings' })
setNewMovie(updatedMovie)
}

useEffect(() => {
async function fetchMovies() {
const movies = await getMovies({})
setMovies(movies)
}

fetchMovies()
}, [])

return (
<>
<h2>Movies</h2>

{movies.length === 0 ? (
<div>No movies yet</div>
) : (
<ul>
{movies.map((movie) => (
<li key={movie.id}>{movie.title}</li>
))}
</ul>
)}

<button onClick={onCreateMovie}>Create movie</button>
<button onClick={onUpdateMovie}>Update movie</button>

{newMovie && <div>Title: {newMovie.title}</div>}
</>
)
}

Import the new component in your front-end application

You need to import and render the new component in the front-end application.

Change the App.tsx as follows

import { useState } from 'react'
import reactLogo from './assets/react.svg'
import viteLogo from '/vite.svg'
import './App.css'

+import { PlatformaticPlayground } from './PlatformaticPlayground'

function App() {
const [count, setCount] = useState(0)

return (
<>
+ <PlatformaticPlayground />
<div>
<a href="https://vitejs.dev" target="_blank">
<img src={viteLogo} className="logo" alt="Vite logo" />
</a>
<a href="https://react.dev" target="_blank">
<img src={reactLogo} className="logo react" alt="React logo" />
</a>
</div>
<h1>Vite + React</h1>
<div className="card">
<button onClick={() => setCount((count) => count + 1)}>count is {count}</button>
<p>
Edit <code>src/App.tsx</code> and save to test HMR
</p>
</div>
<p className="read-the-docs">Click on the Vite and React logos to learn more</p>
</>
)
}

export default App

Have fun

At the top of the front-end application the new component requests the movies from the Platformatic app and lists them.

Platformatic frontend guide: listing the movies

Click on "Create movie" to create a new movie called "Harry Potter".

Platformatic frontend guide: creating a movie

Click on "Update movie" to rename "Harry Potter" into "Lord of the Rings".

Platformatic frontend guide: editing a movie

Reload the front-end application to see the new "Lord of the Rings" movie listed.

Platformatic frontend guide: listing the movies +.

+ + + + \ No newline at end of file diff --git a/docs/next/guides/jwt-auth0/index.html b/docs/next/guides/jwt-auth0/index.html new file mode 100644 index 00000000000..b8d5a36a80d --- /dev/null +++ b/docs/next/guides/jwt-auth0/index.html @@ -0,0 +1,21 @@ + + + + + +Configure JWT with Auth0 | Platformatic Open Source Software + + + + + +
+
Version: Next

Configure JWT with Auth0

Auth0 is a powerful authentication and authorization service provider that can be integrated with Platformatic DB through JSON Web Tokens (JWT) tokens. When a user is authenticated, Auth0 creates a JWT token with all the necessary security information and custom claims (like the X-PLATFORMATIC-ROLE, see User Metadata) and signs the token.

Platformatic DB needs the correct public key to verify the JWT signature. The fastest way is to leverage JWKS, since Auth0 exposes a JWKS endpoint for each tenant. Given an Auth0 tenant's issuer URL, the (public) keys are accessible at ${issuer}/.well-known/jwks.json. For instance, if the issuer is https://dev-xxx.us.auth0.com/, the public keys are accessible at https://dev-xxx.us.auth0.com/.well-known/jwks.json

To configure Platformatic DB authorization to use JWKS with Auth0, set:


...
"authorization": {
"jwt": {
"jwks": {
"allowedDomains": [
"https://dev-xxx.us.auth0.com/"
]
}
},
}
...

danger

Note that specifying allowedDomains is critical to correctly restrict JWTs so that they MUST be issued from one of the allowed domains.

Custom Claim Namespace

In Auth0 there are restrictions about the custom claim that can be set on access tokens. One of these is that the custom claims MUST be namespaced, i.e. we cannot have X-PLATFORMATIC-ROLE but we must specify a namespace, e.g.: https://platformatic.dev/X-PLATFORMATIC-ROLE

To map these claims to user metadata removing the namespace, we can specify the namespace in the JWT options:

...
"authorization": {
"jwt": {
"namespace": "https://platformatic.dev/",
"jwks": {
"allowedDomains": [
"https://dev-xxx.us.auth0.com/"
]
}
},
}
...

With this configuration, the https://platformatic.dev/X-PLATFORMATIC-ROLE claim is mapped to X-PLATFORMATIC-ROLE user metadata.

+ + + + \ No newline at end of file diff --git a/docs/next/guides/migrating-express-app-to-platformatic-service/index.html b/docs/next/guides/migrating-express-app-to-platformatic-service/index.html new file mode 100644 index 00000000000..d1c1150a887 --- /dev/null +++ b/docs/next/guides/migrating-express-app-to-platformatic-service/index.html @@ -0,0 +1,17 @@ + + + + + +Migrating an Express app to Platformatic Service | Platformatic Open Source Software + + + + + +
+
Version: Next

Migrating an Express app to Platformatic Service

Introduction

Our open-source tools are built on top of the modern and flexible Fastify web framework. It provides logging, request validation and a powerful plugin system out-of-the-box, as well as incredible performance.

If you have an existing Express application, migrating it to Fastify could potentially be time consuming, and might not be something that you're able to prioritise right now. You can however still take advantage of Fastify and our open-source tools. In this guide you'll learn how to use the @fastify/express plugin to help you rapidly migrate your existing Express application to use Platformatic Service.

This guide assumes that you have some experience building applications with the Express framework.

Example Express application

For the purpose of this guide, we have a basic example Express application. Although this app has a specific structure, the migration steps covered in this guide can generally be applied to any Express application.

The code for the example Express and migrated Platformatic Service applications is available on GitHub.

Here's the structure of the example Express application:

├── app.js
├── package.json
├── routes
│ └── users.js
└── server.js

It has the following dependencies:

// package.json

"dependencies": {
"express": "^4.18.2"
}

The application has routes in routes/users.js:

// routes/users.js

import express from 'express'

const router = express.Router()

router.use(express.json())

router.post('/', function createUser(request, response, next) {
const newUser = request.body

if (!newUser) {
return next(new Error('Error creating user'))
}

response.status(201).json(newUser)
})

router.get('/:user_id', function getUser(request, response, next) {
const user = {
id: Number(request.params.user_id),
first_name: 'Bobo',
last_name: 'Oso'
}

response.json(user)
})

export const usersRoutes = router

In app.js, we have a factory function that creates a new Express server instance and mounts the routes:

// app.js

import express from 'express'

import { usersRoutes } from './routes/users.js'

export default function buildApp() {
const app = express()

app.use('/users', usersRoutes)

return app
}

And in server.js we're calling the factory function and starting the server listening for HTTP requests:

// server.js

import buildApp from './app.js'

const express = buildApp()

express.listen(3042, () => {
console.log('Example app listening at http://localhost:3042')
})

The routes in your Express application should be mounted on an Express router (or multiple routers if needed). This will allow them to be mounted using @fastify/express when you migrate your app to Platformatic Service.

Creating a new Platformatic Service app

To migrate your Express app to Platformatic Service, create a new Platformatic Service app with:

npm create platformatic@latest

Be sure to select Service as the project type. You should also say yes when you're asked if you want to create the GitHub Actions workflows for deploying your application to Platformatic Cloud.

Once the project has been created, you can delete the example plugins and routes directories.

Using ES modules

If you're using ES modules in the Express application code that you'll be migrating, ensure that there's a type field in package.json set to module:

npm pkg set type=module

Migrate the Express routes

Copy over the routes directory from your Express app.

Install @fastify/express

Install the @fastify/express Fastify plugin to add full Express compatibility to your Platformatic Service app:

npm install @fastify/express

Mounting the Express routes

Create a root Fastify plugin that registers the @fastify/express plugin and loads your Express routes:

// root-plugin.js

import { usersRoutes } from './routes/users.js'

/** @param {import('fastify').FastifyInstance} app */
export default async function (app) {
await app.register(import('@fastify/express'))

app.use('/users', usersRoutes)
}

Configuring the Platformatic Service app

Edit your app's platformatic.service.json to load your root plugin:

// platformatic.service.json

{
...,
"plugins": {
"paths": [{
"path": "./root-plugin.js",
"encapsulate": false
}],
"hotReload": false
},
"watch": false
}

These settings are important when using @fastify/express in a Platformatic Service app:

  • encapsulate — You'll need to disable encapsulation for any Fastify plugin which mounts Express routes. This is due to the way that @fastify/express works.
  • hotReload and watch — You'll need to disable hot reloading and watching for your app, as they don't currently work when using @fastify/express. This is a known issue that we're working to fix.

Wrapping up

You can learn more about building Node.js apps with Platformatic service in the Platformatic Service documentation.

Once you've migrated your Express app to use Platformatic Service with @fastify/express, you might then want to consider fully migrating your Express routes and application code to Fastify. This tutorial shows how you can approach that migration process: How to migrate your app from Express to Fastify (video).

+ + + + \ No newline at end of file diff --git a/docs/next/guides/migrating-fastify-app-to-platformatic-service/index.html b/docs/next/guides/migrating-fastify-app-to-platformatic-service/index.html new file mode 100644 index 00000000000..3654dce93b1 --- /dev/null +++ b/docs/next/guides/migrating-fastify-app-to-platformatic-service/index.html @@ -0,0 +1,17 @@ + + + + + +Migrating a Fastify app to Platformatic Service | Platformatic Open Source Software + + + + + +
+
Version: Next

Migrating a Fastify app to Platformatic Service

Introduction

Building production ready Node.js application with Fastify can require a certain amount of boilerplate code. This is a side effect of some of Fastify's technical principles:

  • If it can be a plugin, it should be a plugin — Plugins help with the separation of concerns, they improve testability, and also provide a way to logically organise and structure your applications.
  • Developer choice = developer freedom — Fastify only applies a few strong opinions, in key areas such as logging and validation. The framework features have been designed to give you the freedom to build your applications however you want.
  • You know your needs best — Fastify doesn't make assumptions about what plugins you'll need in your application. As the Fastify plugin ecosystem and the community has grown, a clear group of popular plugin choices has emerged.

Platformatic Service is the natural evolution of the build-it-from-scratch Fastify development experience. It provides a solid foundation for building Node.js applications on top of Fastify, with best practices baked in.

See the Building apps with Platformatic Service section of this guide to learn more about the built-in features.

The good news is that the path to migrate a Fastify application to use Platformatic Service is fairly straightforward. This guide covers some of the things you'll need to know when migrating an application, as well as tips on different migration approaches.

This guide assumes that you have some experience building applications with the Fastify framework. If you'd like to learn more about building web applications with Fastify, we recommend taking a look at:

Example Fastify application

For the purpose of this guide, we have a basic example Fastify application. Although this app has a specific structure, the migration steps covered in this guide can generally be applied to any Fastify application.

The code for the example Fastify and migrated Platformatic Service applications is available on GitHub.

Here's the structure of the example Fastify application:

├── app.js
├── package.json
├── plugins
│   └── data-source.js
├── routes
│   ├── movies.js
│   └── quotes.js
├── server.js
└── test
└── routes.test.js

It has the following dependencies:

// package.json

"dependencies": {
"fastify": "^4.17.0",
"fastify-plugin": "^4.5.0"
}

The application has a plugin that decorates the Fastify server instance, as well as two Fastify plugins which define API routes. Here's the code for them:

// plugins/data-source.js

import fastifyPlugin from 'fastify-plugin'

/** @param {import('fastify').FastifyInstance} app */
async function dataSource (app) {
app.decorate('movies', [
'Jaws',
'Star Wars',
'The Wizard of Oz'
])

app.decorate('quotes', [
'You\'re gonna need a bigger boat.',
'May the Force be with you.',
'Toto, I\'ve got a feeling we\'re not in Kansas anymore.'
])
}

export default fastifyPlugin(dataSource)

fastify-plugin is used to prevent Fastify from creating a new encapsulation context for the plugin. This makes the decorators that are registered in the dataSource plugin available in the route plugins. You can learn about this fundamental Fastify concept in the Fastify Encapsulation documentation.

// routes/movies.js

/** @param {import('fastify').FastifyInstance} app */
export default async function movieRoutes (app) {
app.get('/', async (request, reply) => {
return app.movies
})
}
// routes/quotes.js

/** @param {import('fastify').FastifyInstance} app */
export default async function quotesRoutes (app) {
app.get('/', async (request, reply) => {
return app.quotes
})
}

The route plugins aren't registering anything that needs to be available in other plugins. They have their own encapsulation context and don't need to be wrapped with fastify-plugin.

There's also a buildApp() factory function in app.js, which takes care of creating a new Fastify server instance and registering the plugins and routes:

// app.js

import fastify from 'fastify'

export async function buildApp (options = {}) {
const app = fastify(options)

app.register(import('./plugins/data-source.js'))

app.register(import('./routes/movies.js'), { prefix: '/movies' })
app.register(import('./routes/quotes.js'), { prefix: '/quotes' })

return app
}

And server.js, which calls the buildApp function to create a new Fastify server, and then starts it listening:

// server.js

import { buildApp } from './app.js'

const port = process.env.PORT || 3042
const host = process.env.HOST || '127.0.0.1'

const options = {
logger: {
level: 'info'
}
}

const app = await buildApp(options)

await app.listen({ port, host })

As well as a couple of tests for the API routes:

// tests/routes.test.js

import { test } from 'node:test'
import assert from 'node:assert/strict'

import { buildApp } from '../app.js'

test('Basic API', async (t) => {
const app = await buildApp()

t.after(async () => {
await app.close()
})

await t.test('GET request to /movies route', async () => {
const response = await app.inject({
method: 'GET',
url: '/movies'
})

assert.equal(response.statusCode, 200)
assert.deepEqual(response.json(), [
'Jaws',
'Star Wars',
'The Wizard of Oz'
])
})

await t.test('GET request to /quotes route', async () => {
const response = await app.inject({
method: 'GET',
url: '/quotes'
})

assert.equal(response.statusCode, 200)
assert.deepEqual(response.json(), [
'You\'re gonna need a bigger boat.',
'May the Force be with you.',
'Toto, I\'ve got a feeling we\'re not in Kansas anymore.'
])
})
})

These tests are using the built in Node.js test runner, node:test. They can be run with the command: node --test --test-reporter=spec test/*.test.js.

The @param lines in this application code are JSDoc blocks that import the FastifyInstance type. This allows many code editors to provide auto-suggest, type hinting and type checking for your code.

Creating a new Platformatic Service app

To migrate your Fastify app to Platformatic Service, create a new Platformatic Service app with:

npm create platformatic@latest

Be sure to select Service as the project type. Once the project has been created, you can delete the example plugins and routes directories.

App configuration

The configuration for the Platformatic Service app is stored in platformatic.service.json.

The generated configuration is set up to load plugins from the plugins and routes directories:

// platformatic.service.json

"plugins": {
"paths": [
"./plugins",
"./routes"
]
}

The value for any configuration setting in platformatic.service.json can be replaced with an environment variable by adding a placeholder, for example {PLT_SERVER_LOGGER_LEVEL}. In development, environment variables are automatically loaded by your Platformatic Service app from a .env file in the root directory of your app. In production, you'll typically set these environment variables using a feature provided by your hosting provider.

See the Platformatic Service documentation for Environment variable placeholders to learn more about how this works.

Using ES modules

If you're using ES modules in the Fastify application code that you'll be migrating, ensure that there's a type field in package.json set to module:

npm pkg set type=module

Refactoring Fastify server factories

If your Fastify application has a script with a factory function to create and build up a Fastify server instance, you can refactor it into a Fastify plugin and use it in your Platformatic Service app.

Here are a few things to consider while refactoring it:

  • Move the options you're passing to Fastify when creating a new server instance to the server block in platformatic.service.json. These options will be passed through directly by Platformatic Service when it creates a Fastify server instance.
  • You can create a root plugin to be loaded by your Platformatic Service app, for example: export default async function rootPlugin (app, options) { ... }
  • When you copy the code from your factory function into your root plugin, remove the code which is creating the Fastify server instance.
  • You can configure your Platformatic Service to load the root plugin, for example:
    "plugins": {
    "paths": ["./root-plugin.js"]
    }
  • If you need to pass options to your root plugin, you can do it like this:
    "plugins": {
    "paths": [
    {
    "path": "./root-plugin.js",
    "options": {
    "someOption": true
    }
    }
    ]
    }

Migrating plugins

Copy over the plugins directory from your Fastify app. You shouldn't need to make any modifications for them to work with Platformatic Service.

Disabling plugin encapsulation

Platformatic Service provides a configuration setting which enables you to disable encapsulation for a plugin, or all the plugins within a directory. This will make any decorators or hooks that you set in those plugins available to all other plugins. This removes the need for you to wrap your plugins with fastify-plugin.

To disable encapsulation for all plugins within the plugins directory, you would set your plugins configuration like this in platformatic.service.json:

// platformatic.service.json

"plugins": {
"paths": [
{
"path": "./plugins",
"encapsulate": false
},
"./routes"
]
}

You can learn more about plugin encapsulation in the Fastify Plugins Guide.

Migrating routes

Copy over the routes directory from your Fastify app.

Explicit route paths

If you're registering routes in your Fastify application with full paths, for example /movies, you won't need to make any changes to your route plugins.

Route prefixing with file-system based routing

If you're using the prefix option when registering route plugins in your Fastify application, for example:

app.register(import('./routes/movies.js'), { prefix: '/movies' })

You can achieve the same result with Platformatic Service by using file-system based routing. With the following directory and file structure:

routes/
├── movies
│   └── index.js
└── quotes
└── index.js

Assuming that both of the route files register a / route, these are the route paths that will be registered in your Platformatic Service app:

/movies
/quotes

With the example Fastify application, this would mean copying the route files over to these places in the Platformatic Service app:

routes/movies.js -> routes/movies/index.js
routes/quotes.js -> routes/quotes/index.js

How does this work? Plugins are loaded with the @fastify/autoload Fastify plugin. The dirNameRoutePrefix plugin option for @fastify/autoload is enabled by default. This means that "routes will be automatically prefixed with the subdirectory name in an autoloaded directory".

If you'd prefer not to use file-system based routing with Platformatic Service, you can add prefixes to the paths for the routes themselves (see Explicit route paths).

Adapting existing usage of @fastify/autoload

If you're using @fastify/autoload in your Fastify application, there are a couple of approaches you can take when migrating the app to Platformatic Service:

  • Configure plugins in your Platformatic Service app's platformatic.service.json. It will then take care of loading your routes and plugins for you with @fastify/autoload (configuration documentation).
  • You can continue to use @fastify/autoload directly with a little refactoring. See the tips in the Refactoring Fastify server factories section.

Migrating tests

You'll generally use the Platformatic CLI to start your Platformatic Service app (npx platformatic start). However for testing, you can use the programmatic API provided by Platformatic Service. This allows you to load your app in your test scripts and then run tests against it.

If you copy over the tests from your existing Fastify app, they will typically only require a small amount of refactoring to work with Platformatic Service.

Replacing your Fastify server factory function

The example Fastify app has a buildApp() factory function which creates a Fastify server instance. The import line for that function can be removed from tests/routes.test.js:

// tests/routes.test.js

import { buildApp } from '../app.js'

And replaced with an import of the buildServer() function from @platformatic/service:

// tests/routes.test.js

import { buildServer } from '@platformatic/service'

You can then load your Platformatic Service app like this:


const app = await buildServer('./platformatic.service.json')

Disabling server logging in your tests

If you have logging enabled for your Platformatic Service app, you'll probably want to disable the logging in your tests to remove noise from the output that you receive when you run your tests.

Instead of passing the path to your app's configuration to buildServer(), you can import the app configuration and disable logging:

// tests/routes.test.js

import serviceConfig from '../platformatic.service.json' assert { type: 'json' }

serviceConfig.server.logger = false

Then pass that serviceConfig configuration object to the buildServer() function:

// tests/routes.test.js

const app = await buildServer(serviceConfig)

Import assertions — the assert { type: 'json' } syntax — are not a stable feature of the JavaScript language, so you'll receive warning messages from Node.js when running your tests. You can disable these warnings by passing the --no-warnings flag to node.

Building apps with Platformatic Service

Because Platformatic Service is built on top of the Fastify framework, you're able to use the full functionality of the Fastify framework in your Platformatic Service app. This includes:

  • Fast, structured logging, provided by Pino
  • Request validation with JSON Schema and Ajv (other validation libraries are supported too)
  • Hooks, which allow fine grained control over when code is run during the request/response lifecycle.
  • Decorators, which allow you to customize core Fastify objects and write more modular code.

Platformatic Service also provides many other features that are built on top of Fastify.

Application features

All Platformatic Service features are fully configurable via platformatic.service.json.

Development features

  • Hot reloading — Your server will automatically reload in development as you develop features.
  • Write your plugins in JavaScript or TypeScript — TypeScript support is provided out-of-the-box and supports hot reloading.
  • Pretty printed logs — Making it easier to understand and debug your application during development.

See the Platformatic Service Configuration documentation for all of the features which can be configured.

Next steps

The documentation for Platformatic Service is a helpful reference when building a Platformatic Service app.

Watch: Understand the parts of a Platformatic app

You want to be confident that you understand how your applications work. In this video you'll learn about the parts that make up a Platformatic application, what each part does, and how they fit together.

Our series of Platformatic How-to videos can help get you up and running building apps with Platformatic open-source tools.

Got questions or need help migrating your Fastify app to use Platformatic Service? Drop by our Discord server and we'll be happy to help you.

+ + + + \ No newline at end of file diff --git a/docs/next/guides/monitoring/index.html b/docs/next/guides/monitoring/index.html new file mode 100644 index 00000000000..a72b708560a --- /dev/null +++ b/docs/next/guides/monitoring/index.html @@ -0,0 +1,24 @@ + + + + + +Monitoring with Prometheus and Grafana | Platformatic Open Source Software + + + + + +
+
Version: Next

Monitoring with Prometheus and Grafana

Prometheus is an open source systems monitoring and alerting toolkit. It's a time series database that collects metrics from configured targets at given intervals, evaluates rule expressions, displays the results, and can trigger alerts if some condition is observed to be true. +Grafana is open source visualization and analytics software.

It's a pretty common solution to use Prometheus to collect and store monitoring data, and Grafana to visualize it.

Platformatic can be configured to expose Prometheus metrics:

...
"metrics": {
"port": 9091,
"auth": {
"username": "platformatic",
"password": "mysecret"
}
}
...

In this case, we are exposing the metrics on port 9091 (defaults to 9090), and we are using basic authentication to protect the endpoint. +We can also specify the IP address to bind to (defaults to 0.0.0.0). +Note that the metrics port is not the default in this configuration. This is because if you want to test the integration running both Prometheus and Platformatic on the same host, Prometheus itself starts on port 9090 too. +All the configuration settings are optional. To use the default settings, set "metrics": true. See the configuration reference for more details.

caution

Use environment variable placeholders in your Platformatic DB configuration file to avoid exposing credentials.

Prometheus Configuration

This is an example of a minimal Prometheus configuration to scrape the metrics from Platformatic:

global:
scrape_interval: 15s
scrape_timeout: 10s
evaluation_interval: 1m
scrape_configs:
- job_name: 'platformatic'
scrape_interval: 2s
metrics_path: /metrics
scheme: http
basic_auth:
username: platformatic
password: mysecret
static_configs:
- targets: ['192.168.69.195:9091']
labels:
group: 'platformatic'

We specify a target configuring the IP address and the port where Platformatic is running, and we specify the username and password to use for basic authentication. The metrics path is the one used by Platformatic. The IP address is not a loopback address, so this will work even with Prometheus running in Docker on the same host (see below) — please change it to your host's IP address.

To test this configuration, we can run Prometheus locally using docker and docker-compose, so please be sure to have both correctly installed. +Save the above configuration in a file named ./prometheus/prometheus.yml and create a docker-compose.yml:

version: "3.7"

services:
prometheus:
image: prom/prometheus:latest
volumes:
- prometheus_data:/prometheus
- ./prometheus/prometheus.yml:/etc/prometheus/prometheus.yml
command:
- '--config.file=/etc/prometheus/prometheus.yml'
ports:
- '9090:9090'

volumes:
prometheus_data: {}

Then run docker-compose up -d and open http://localhost:9090 in your browser. You should see the Prometheus dashboard, and you can also query the metrics, e.g. {group="platformatic"}. See Prometheus docs for more information on querying and metrics.

Grafana Configuration

Let's see how we can configure Grafana to chart some Platformatic metrics from Prometheus. +Change the docker-compose.yml to add a grafana service:

version: "3.7"
services:

prometheus:
image: prom/prometheus:latest
volumes:
- prometheus_data:/prometheus
- ./prometheus/prometheus.yml:/etc/prometheus/prometheus.yml
command:
- '--config.file=/etc/prometheus/prometheus.yml'
ports:
- '9090:9090'

grafana:
image: grafana/grafana:latest
volumes:
- grafana_data:/var/lib/grafana
environment:
- GF_SECURITY_ADMIN_PASSWORD=pleasechangeme
depends_on:
- prometheus
ports:
- '3000:3000'

volumes:
prometheus_data: {}
grafana_data: {}

In Grafana, select Configuration -> Data Sources -> Add Data Source, and select Prometheus. +In the URL field, specify the URL of the Prometheus server, e.g. http://prometheus:9090 (the name of the service in the docker-compose file), then Save & Test.

Now we can create a dashboard and add panels to it. Select the Prometheus data source, and add queries. You should see the metrics exposed by Platformatic.

It's also possible to import pre-configured dashboards, like this one from Grafana.com.

+ + + + \ No newline at end of file diff --git a/docs/next/guides/packaging-an-application-as-a-module/index.html b/docs/next/guides/packaging-an-application-as-a-module/index.html new file mode 100644 index 00000000000..cd461a44d7a --- /dev/null +++ b/docs/next/guides/packaging-an-application-as-a-module/index.html @@ -0,0 +1,27 @@ + + + + + +Packaging a Platformatic Application as a module | Platformatic Open Source Software + + + + + +
+
Version: Next

Packaging a Platformatic Application as a module

Platformatic Service and Platformatic DB +offer a good starting point to create new applications. However, most developers or organizations might want to +create reusable services or applications built on top of Platformatic. +This is useful to publish the application on the public npm registry (or a private one!), including building your own CLI, +or to create a specialized template for your organization to allow for centralized bugfixes and updates.

This process is the same one we use to maintain Platformatic DB and Platformatic Composer on top of Platformatic Service.

Creating a custom Service

We are creating the module foo.js as follows:

const { schema, platformaticService } = require('@platformatic/service')

/** @type {import('fastify').FastifyPluginAsync<{}>} */
async function foo (app, opts) {
const text = app.platformatic.config.foo.text
app.get('/foo', async (request, reply) => {
return text
})

await platformaticService(app, opts)
}

foo.configType = 'foo'

// break Fastify encapsulation
foo[Symbol.for('skip-override')] = true

// The schema for our configuration file
foo.schema = {
$id: 'https://example.com/schemas/foo.json',
title: 'Foo Service',
type: 'object',
properties: {
server: schema.server,
plugins: schema.plugins,
metrics: schema.metrics,
watch: {
anyOf: [schema.watch, {
type: 'boolean'
}, {
type: 'string'
}]
},
$schema: {
type: 'string'
},
module: {
type: 'string'
},
foo: {
type: 'object',
properties: {
text: {
type: 'string'
}
},
required: ['text']
}
},
additionalProperties: false,
required: ['server']
}

// The configuration for the ConfigManager
foo.configManagerConfig = {
schema: foo.schema,
envWhitelist: ['PORT', 'HOSTNAME'],
allowToWatch: ['.env'],
schemaOptions: {
useDefaults: true,
coerceTypes: true,
allErrors: true,
strict: false
}
}

module.exports = foo

Note that the $id property of the schema identifies the module in our system, +allowing us to retrieve the schema correctly. +It is recommended, but not required, that the JSON schema is actually +published in this location. Doing so allows tooling such as the VSCode +language server to provide autocompletion.

In this example, the schema adds a custom top-level foo property +that users can use to configure this specific module.

ESM is also supported.

Consuming a custom application

Consuming foo.js is simple. We can create a platformatic.json file as follows:

{
"$schema": "https://example.com/schemas/foo.json",
"module": "./foo",
"server": {
"port": 0,
"hostname": "127.0.0.1"
},
"foo": {
"text": "Hello World"
}
}

Note that we must specify both the $schema property and module. +Module can also be any modules published on npm and installed via your package manager.

Building your own CLI

It is possible to build your own CLI with the following cli.mjs file:

import foo from './foo.js'
import { start } from '@platformatic/service'
import { printAndExitLoadConfigError } from '@platformatic/config'

await start(foo, process.argv.splice(2)).catch(printAndExitLoadConfigError)

This will also load platformatic.foo.json files.

+ + + + \ No newline at end of file diff --git a/docs/next/guides/prisma/index.html b/docs/next/guides/prisma/index.html new file mode 100644 index 00000000000..1add6e0832f --- /dev/null +++ b/docs/next/guides/prisma/index.html @@ -0,0 +1,17 @@ + + + + + +Integrate Prisma with Platformatic DB | Platformatic Open Source Software + + + + + +
+
Version: Next

Integrate Prisma with Platformatic DB

Prisma is an open-source ORM for Node.js and TypeScript. It is used as an alternative to writing SQL, or using another database access tool such as SQL query builders (like knex.js) or ORMs (like TypeORM and Sequelize). Prisma currently supports PostgreSQL, MySQL, SQL Server, SQLite, MongoDB, and CockroachDB.

Prisma can be used with JavaScript or TypeScript, and provides a level of type-safety that goes beyond the guarantees made by other ORMs in the TypeScript ecosystem. You can find an in-depth comparison of Prisma against other ORMs here.

If you want to get a quick overview of how Prisma works, you can follow the Quickstart or read the Introduction in the Prisma documentation.

How Prisma can improve your workflow with Platformatic DB

While Platformatic speeds up development of your REST and GraphQL APIs, Prisma can complement the workflow in several ways:

  1. Provides an intuitive data modeling language
  2. Provides auto-generated and customizable SQL migrations
  3. Provides type-safety and auto-completion for your database queries

You can learn more about why Prisma and Platformatic are a great match in this article.

Prerequisites

To follow along with this guide, you will need to have the following:

Setup Prisma

Install the Prisma CLI and the db-diff development dependencies in your project:

npm install --save-dev prisma @ruheni/db-diff

Next, initialize Prisma in your project

npx prisma init

This command does the following:

  • Creates a new directory called prisma which contains a file called schema.prisma. This file defines your database connection and the Prisma Client generator.
  • Creates a .env file at the root of your project if it doesn't exist. This defines your environment variables (used for your database connection).

You can specify your preferred database provider using the --datasource-provider flag, followed by the name of the provider:

npx prisma init --datasource-provider postgresql # or sqlite, mysql, sqlserver, cockroachdb

Prisma uses the DATABASE_URL environment variable to connect to your database to sync your database and Prisma schema. It also uses the variable to connect to your database to run your Prisma Client queries.

If you're using PostgreSQL, MySQL, SQL Server, or CockroachDB, ensure that the DATABASE_URL used by Prisma is the same as the one used by Platformatic DB project. If you're using SQLite, refer to the Using Prisma with SQLite section.

If you have an existing project, refer to the Adding Prisma to an existing Platformatic DB project section. If you're adding Prisma to a new project, refer to the Adding Prisma to a new project.

Adding Prisma to an existing project

If you have an existing Platformatic DB project, you can introspect your database and generate the data model in your Prisma schema with the following command:

npx prisma db pull

The command will introspect your database and generate the data model

Next, add the @@ignore attribute to the versions model to exclude it from the Prisma Client API:

model versions {
version BigInt @id
name String?
md5 String?
run_at DateTime? @db.Timestamptz(6)

+ @@ignore
}

To learn how you can evolve your database schema, you can jump to the Evolving your database schema section.

Adding Prisma to a new project

Define a Post model with the following fields at the end of your schema.prisma file:

prisma/schema.prisma
model Post {
id Int @id @default(autoincrement())
title String
content String?
published Boolean @default(false)
viewCount Int @default(0)
createdAt DateTime @default(now())

@@map("posts")
}

The snippet above defines a Post model with the following fields and properties:

  • id: An auto-incrementing integer that will be the primary key for the model.
  • title: A non-nullable String field.
  • content: A nullable String field.
  • published: A Boolean field with a default value of false.
  • viewCount: An Int field with a default value of 0.
  • createdAt: A DateTime field with a timestamp of when the value is created as its default value.

By default, Prisma maps the model name and its format to the table name — which is also used in Prisma Client. Platformatic DB uses snake casing and pluralized table names to map your table names to the generated API. The @@map() attribute in the Prisma schema allows you to define the name and format of your table names to be used in your database. You can also use the @map() attribute to define the format for field names to be used in your database. Refer to the Foreign keys and table names naming conventions section to learn how you can automate formatting foreign keys and table names.

Next, run the following command to generate an up and down migration:

npx db-diff

The previous command will generate both an up and down migration based on your schema. The generated migration is stored in your ./migrations directory. If you are currently using a different path to store the migration, you can provide the --migrations-dir flag followed by the path.

You can then apply the generated migration using the Platformatic DB CLI:

npx platformatic db migrations apply

Platformatic uses Postgrator to run migrations. Postgrator creates a table in the database called versions to track the applied migrations. Since the versions table is not yet captured in the Prisma schema, run the following command to introspect the database and populate it with the missing model:

npx prisma db pull

Introspecting the database to populate the model prevents including the versions table in the generated down migrations.

Your Prisma schema should now contain a versions model that is similar to this one (it will vary depending on the database system you're using):

model Post {
id Int @id @default(autoincrement())
title String
content String?
published Boolean @default(false)
viewCount Int @default(0)
createdAt DateTime @default(now())

@@map("posts")
}

+model versions {
+ version BigInt @id
+ name String?
+ md5 String?
+ run_at DateTime? @db.Timestamptz(6)
+}

Add the @@ignore attribute function to the model to exclude it from the Prisma Client API:

model versions {
version BigInt @id
name String?
md5 String?
run_at DateTime? @db.Timestamptz(6)

+ @@ignore
}

Evolving your database schema

Update the data model in your Prisma schema by adding a model or a field:

// based on the schema in the "Adding Prisma to a new project" section
+model User {
+ id Int @id @default(autoincrement())
+ email String @unique
+ name String?
+ posts Post[]
+
+ @@map("users")
+}

model Post {
id Int @id @default(autoincrement())
createdAt DateTime @default(now())
title String
content String?
published Boolean @default(false)
viewCount Int @default(0)
+ author User? @relation(fields: [authorId], references: [id])
+ authorId Int? @map("author_id")

@@map("posts")
}

Next, use the @ruheni/db-diff CLI tool to generate up and down migrations:

npx db-diff

This command will generate up and down migrations based off of your Prisma schema. If you are currently using a different path to store the migration, you can provide the --migrations-dir flag followed by the path.

Next, apply the generated migration using the Platformatic CLI:

npx platformatic db migrations apply

And you're done!

Using Prisma Client in your plugins

Plugins allow you to add custom functionality to your REST and GraphQL API. Refer to the Add Custom Functionality guide to learn more about how you can add custom functionality.

danger

Prisma Client usage with Platformatic is currently only supported in Node v18

You can use Prisma Client to interact with your database in your plugin.

To get started, run the following command:

npx prisma generate

The above command installs the @prisma/client in your project and generates a Prisma Client based off of your Prisma schema.

Install @sabinthedev/fastify-prisma fastify plugin. The plugin takes care of shutting down database connections and makes Prisma Client available as a Fastify plugin.

npm install @sabinthedev/fastify-prisma

Register the plugin and extend your REST API:

// 1.
const prismaPlugin = require("@sabinthedev/fastify-prisma")

module.exports = async (app) => {
app.log.info('plugin loaded')

// 2.
app.register(prismaPlugin)

/**
* Plugin logic
*/
// 3.
app.put('/post/:id/views', async (req, reply) => {

const { id } = req.params

// 4.
const post = await app.prisma.post.update({
where: {
id: Number(id)
},
data: {
viewCount: {
increment: 1
}
}
})

// 5.
return reply.send(post)
})
}

The snippet does the following:

  1. Imports the plugin
  2. Registers the @sabinthedev/fastify-prisma
  3. Defines the endpoint for incrementing the views of a post
  4. Makes a query to the database on the Post model to increment a post's view count
  5. Returns the updated post on success

If you would like to extend your GraphQL API, extend the schema and define the corresponding resolver:

plugin.js
// ./plugin.js
const prismaPlugin = require("@sabinthedev/fastify-prisma")

module.exports = async (app) => {
app.log.info('plugin loaded')

app.graphql.extendSchema(`
extend type Mutation {
incrementPostViewCount(id: ID): Post
}
`)

app.graphql.defineResolvers({
Mutation: {
incrementPostViewCount: async (_, { id }) => {
const post = await prisma.post.update({
where: {
id: Number(id)
},
data: {
viewCount: {
increment: 1
}
}
})

if (!post) throw new Error(`Post with id:${id} was not found`)
return post
}
}
})
}

Start the server:

npx platformatic db start

The query should now be included in your GraphQL schema.

You can also use the Prisma Client in your REST API endpoints.

Workarounds

Using Prisma with SQLite

Currently, Prisma doesn't resolve the file path of a SQLite database the same way as Platformatic does.

If your database is at the root of the project, create a new environment variable that Prisma will use called PRISMA_DATABASE_URL:

# .env
DATABASE_URL="sqlite://db.sqlite"
PRISMA_DATABASE_URL="file:../db.sqlite"

Next, update the url value in the datasource block in your Prisma schema with the updated value:

prisma/schema.prisma
// ./prisma/schema.prisma
datasource db {
provider = "sqlite"
url = env("PRISMA_DATABASE_URL")
}

Running migrations should now work smoothly and the path will be resolved correctly.

Foreign keys, field, and table names naming conventions

Foreign key names should use underscores, e.g. author_id, for Platformatic DB to correctly map relations. You can use the @map("") attribute to define the names of your foreign keys and field names to be defined in the database.

Table names should be mapped to use the naming convention expected by Platformatic DB e.g. @@map("recipes") (the Prisma convention is Recipe, which corresponds with the model name).

You can use prisma-case-format to enforce your own database conventions, i.e., pascal, camel, and snake casing.

Learn more

If you would like to learn more about Prisma, be sure to check out the Prisma docs.

+ + + + \ No newline at end of file diff --git a/docs/next/guides/securing-platformatic-db/index.html b/docs/next/guides/securing-platformatic-db/index.html new file mode 100644 index 00000000000..b0fd013c0cd --- /dev/null +++ b/docs/next/guides/securing-platformatic-db/index.html @@ -0,0 +1,31 @@ + + + + + +Securing Platformatic DB with Authorization | Platformatic Open Source Software + + + + + +
+
Version: Next

Securing Platformatic DB with Authorization

Introduction

Authorization in Platformatic DB is role-based. User authentication and the +assignment of roles must be handled by an external authentication service. +Take a look at the reference documentation for Authorization.

The goal of this simple guide is to protect an API built with Platformatic DB +with the use of a shared secret, that we call adminSecret. We want to prevent +any user that is not an admin to access the data.

The use of an adminSecret is a simplistic way of securing a system. +It is a crude way for limiting access and not suitable for production systems, +as the risk of leaking the secret is high in case of a security breach. +A production friendly way would be to issue a machine-to-machine JSON Web Token, +ideally with an asymmetric key. Alternatively, you can defer to an external +service via a Web Hook.

Please refer to our guide to set up Auth0 for more information +on JSON Web Tokens.

Block access to all entities, allow admins

The following configuration will block all anonymous users (e.g. each user without a known role) +to access every entity:

{
...
"authorization": {
"adminSecret": "replaceWithSomethingRandomAndSecure"
}
}

The data will still be available if the X-PLATFORMATIC-ADMIN-SECRET HTTP header +is specified when making HTTP calls, like so:

curl -H 'X-PLATFORMATIC-ADMIN-SECRET: replaceWithSomethingRandomAndSecure' http://127.0.0.1:3042/pages
info

Configuring JWT or Web Hooks will have the same result of configuring an admin secret.

Authorization rules

Rules can be provided based on entity and role in order to restrict access and provide fine-grained access control. +To allow only an admin to query and save the page table / page entity using adminSecret, this structure should be used in the platformatic.db configuration file:

  ...
"authorization": {
"adminSecret": "easy",
"rules": [{
"entity": "movie",
"role": "platformatic-admin",
"find": true,
"save": true,
"delete": false
}
]
}
info

Note that the role of an admin user from adminSecret strategy is platformatic-admin by default.

Read-only access to anonymous users

The following configuration will allow all anonymous users (e.g. each user without a known role) +to access the pages table / page entity in read-only mode:

{
...
"authorization": {
"adminSecret": "replaceWithSomethingRandomAndSecure",
"rules": [{
"role": "anonymous",
"entity": "page",
"find": true,
"save": false,
"delete": false
}]
}
}

Note that we set find as true to allow the access, while the other options are false.

Work in Progress

This guide is a Work-In-Progress. Let us know what other common authorization use cases we should cover.

+ + + + \ No newline at end of file diff --git a/docs/next/guides/seed-a-database/index.html b/docs/next/guides/seed-a-database/index.html new file mode 100644 index 00000000000..735de19a930 --- /dev/null +++ b/docs/next/guides/seed-a-database/index.html @@ -0,0 +1,21 @@ + + + + + +Seed a Database | Platformatic Open Source Software + + + + + +
+
Version: Next

Seed a Database

A database is as useful as the data that it contains: a fresh, empty database +isn't always the best starting point. We can add a few rows from our migrations +using SQL, but we might need to use JavaScript from time to time.

The platformatic db seed command allows us to run a +script that will populate — or "seed" — our database.

Example

Our seed script should export a Function that accepts an argument: +an instance of @platformatic/sql-mapper.

seed.js
'use strict'

module.exports = async function ({ entities, db, sql }) {
await entities.graph.save({ input: { name: 'Hello' } })
await db.query(sql`
INSERT INTO graphs (name) VALUES ('Hello 2');
`)
}

We can then run the seed script with the Platformatic CLI:

npx platformatic db seed seed.js
+ + + + \ No newline at end of file diff --git a/docs/next/guides/telemetry/index.html b/docs/next/guides/telemetry/index.html new file mode 100644 index 00000000000..cc035c83366 --- /dev/null +++ b/docs/next/guides/telemetry/index.html @@ -0,0 +1,21 @@ + + + + + +Telemetry with Jaeger | Platformatic Open Source Software + + + + + +
+
Version: Next

Telemetry with Jaeger

Introduction

Platformatic supports Open Telemetry integration. This allows you to send telemetry data to one of the OTLP compatible servers (see here) or to a Zipkin server. Let's show this with Jaeger.

Jaeger setup

The quickest way is to use docker:

docker run -d --name jaeger \
-e COLLECTOR_OTLP_ENABLED=true \
-p 16686:16686 \
-p 4317:4317 \
-p 4318:4318 \
jaegertracing/all-in-one:latest

Check that the server is running by opening http://localhost:16686/ in your browser.

Platformatic setup

We will test this with a Platformatic Composer that proxies requests to a Platformatic Service, which in turn invokes a Platformatic DB Service. +In this way we show that the telemetry is propagated from the Composer throughout the services and then collected correctly. +Let's set up all these components:

Platformatic DB Service

Create a folder for DB and cd into it:

mkdir test-db
cd test-db

Then create a db in the folder using npx create-platformatic@latest:

npx create-platformatic@latest

To make it simple, use sqlite and create/apply the default migrations. This DB Service is exposed on port 5042:


➜ npx create-platformatic@latest

Hello user, welcome to Platformatic 0.32.0!
Let's start by creating a new project.
? Which kind of project do you want to create? DB
? Where would you like to create your project? .
? What database do you want to use? SQLite
? Do you want to use the connection string "sqlite://./db.sqlite"? Confirm
? Do you want to create default migrations? yes
? Do you want to create a plugin? no
? Do you want to use TypeScript? no
? What port do you want to use? 5042
[15:40:46] INFO: Configuration file platformatic.db.json successfully created.
[15:40:46] INFO: Environment file .env successfully created.
[15:40:46] INFO: Migrations folder migrations successfully created.
[15:40:46] INFO: Migration file 001.do.sql successfully created.
[15:40:46] INFO: Migration file 001.undo.sql successfully created.
[15:40:46] INFO: Plugin file created at plugin.js
? Do you want to run npm install? no
? Do you want to apply migrations? yes
...done!
? Do you want to generate types? no
? Do you want to create the github action to deploy this application to Platformatic Cloud dynamic workspace? no
? Do you want to create the github action to deploy this application to Platformatic Cloud static workspace? no

All done! Please open the project directory and check the README.
Will test this in one example with a Platformatic Composer that proxy requests to a Platformatic Service, which in turn invokes a Platformatic DB.

Open the platformatic.db.json file and add the telemetry configuration:

  "telemetry": {
"serviceName": "test-db",
"exporter": {
"type": "otlp",
"options": {
"url": "http://localhost:4318/v1/traces"
}
}
}

Finally, start the DB service:

npx platformatic db start

Platformatic Service

Create at the same level of test-db another folder for Service and cd into it:

mkdir test-service
cd test-service

Then create a service on the 5043 port in the folder using npx create-platformatic@latest:

➜ npx create-platformatic@latest

Hello user, welcome to Platformatic 0.32.0!
Let's start by creating a new project.
? Which kind of project do you want to create? Service
? Where would you like to create your project? .
? Do you want to run npm install? no
? Do you want to use TypeScript? no
? What port do you want to use? 5043
[15:55:35] INFO: Configuration file platformatic.service.json successfully created.
[15:55:35] INFO: Environment file .env successfully created.
[15:55:35] INFO: Plugins folder "plugins" successfully created.
[15:55:35] INFO: Routes folder "routes" successfully created.
? Do you want to create the github action to deploy this application to Platformatic Cloud dynamic workspace? no
? Do you want to create the github action to deploy this application to Platformatic Cloud static workspace? no

Open the platformatic.service.json file and add the following telemetry configuration (it's exactly the same as DB, but with a different serviceName)

  "telemetry": {
"serviceName": "test-service",
"exporter": {
"type": "otlp",
"options": {
"url": "http://localhost:4318/v1/traces"
}
}
}

We want this service to invoke the DB service, so we need to add a client for test-db to it:

npx platformatic client http://127.0.0.1:5042 js --name movies

Check platformatic.service.json to see that the client has been added (PLT_MOVIES_URL is defined in .env):

    "clients": [
{
"schema": "movies/movies.openapi.json",
"name": "movies",
"type": "openapi",
"url": "{PLT_MOVIES_URL}"
}
]

Now open routes/root.js and add the following:

  fastify.get('/movies-length', async (request, reply) => {
const movies = await request.movies.getMovies()
return { length: movies.length }
})

This code calls movies to get all the movies and returns the length of the array.

Finally, start the service:

npx platformatic service start

Platformatic Composer

At the same level as test-db and test-service, create another folder for Composer and cd into it:

mkdir test-composer
cd test-composer

Then create a composer on the 5044 port in the folder using npx create-platformatic@latest:

➜ npx create-platformatic@latest

Hello user, welcome to Platformatic 0.32.0!
Let's start by creating a new project.
? Which kind of project do you want to create? Composer
? Where would you like to create your project? .
? What port do you want to use? 5044
? Do you want to run npm install? no
[16:05:28] INFO: Configuration file platformatic.composer.json successfully created.
[16:05:28] INFO: Environment file .env successfully created.
? Do you want to create the github action to deploy this application to Platformatic Cloud dynamic workspace? no
? Do you want to create the github action to deploy this application to Platformatic Cloud static workspace? no

All done! Please open the project directory and check the README.

Open platformatic.composer.json and change it to the following:

{
"$schema": "https://platformatic.dev/schemas/v0.32.0/composer",
"server": {
"hostname": "{PLT_SERVER_HOSTNAME}",
"port": "{PORT}",
"logger": {
"level": "{PLT_SERVER_LOGGER_LEVEL}"
}
},
"composer": {
"services": [
{
"id": "example",
"origin": "http://127.0.0.1:5043",
"openapi": {
"url": "/documentation/json"
}
}
],
"refreshTimeout": 3000
},
"telemetry": {
"serviceName": "test-composer",
"exporter": {
"type": "otlp",
"options": {
"url": "http://localhost:4318/v1/traces"
}
}
},
"watch": true
}

Note that we just added test-service as the origin of the proxied service and added the usual telemetry configuration, with a different serviceName.

Finally, start the composer:

npx platformatic composer start

Run the Test

Check that the composer is exposing movies-length opening: http://127.0.0.1:5044/documentation/

You should see: image

To add some data, we can POST directly to the DB service (port 5042):

curl -X POST -H "Content-Type: application/json" -d '{"title":"The Matrix"}' http://127.0.0.1:5042/movies 
curl -X POST -H "Content-Type: application/json" -d '{"title":"The Matrix Reloaded"}' http://127.0.0.1:5042/movies

Now, let's check that the composer (port 5044) is working:

curl http://127.0.0.1:5044/movies-length

If the composer is working correctly, you should see:

{"length":2}

However, the main interest of this example is to show how to use the Platformatic Telemetry, so let's check it. Open the Jaeger UI at http://localhost:16686/ and you should see something like this:

image

Select on the left the test-composer service and the GET /movies-length operation, click on "Find traces" and you should see something like this:

image

You can then click on the trace and see the details:

image

Note that every time a request is received or a client call is made, a new span is started. So we have:

  • One span for the request received by the test-composer
  • One span for the client call to test-service
  • One span for the request received by test-service
  • One span for the client call to test-db
  • One span for the request received by test-db

All these spans are linked together, so you can see the whole trace.

What if you want to use Zipkin?

Starting from this example, it's also possible to run the same test using Zipkin. To do so, you need to start the Zipkin server:

docker run -d -p 9411:9411 openzipkin/zipkin

Then, you need to change the telemetry configuration in all the platformatic.*.json files to the following (only the exporter object is different):

  "telemetry": {
(...)
"exporter": {
"type": "zipkin",
"options": {
"url": "http://127.0.0.1:9411/api/v2/spans"
}
}
}

The zipkin ui is available at http://localhost:9411/

+ + + + \ No newline at end of file diff --git a/docs/next/platformatic-cloud/deploy-database-neon/index.html b/docs/next/platformatic-cloud/deploy-database-neon/index.html new file mode 100644 index 00000000000..13569cd3e2c --- /dev/null +++ b/docs/next/platformatic-cloud/deploy-database-neon/index.html @@ -0,0 +1,32 @@ + + + + + +Deploy a PostgreSQL database with Neon | Platformatic Open Source Software + + + + + +
+
Version: Next

Deploy a PostgreSQL database with Neon

Neon offers multi-cloud fully managed Postgres with a generous free tier. They separated storage and compute to offer autoscaling, branching, and bottomless storage. It offers a great environment for creating database preview environments for your Platformatic DB applications.

This guide shows you how to integrate Neon branch deployments with your Platformatic app's GitHub Actions workflows. It assumes you have already followed the Quick Start Guide.

Create a project on Neon

To set up an account with Neon, open their website, sign up and create a +new project.

Take note of the following configuration setting values:

  • The connection string for your main branch database, to be stored in a NEON_DB_URL_PRODUCTION secret
  • The Project ID (available under the project Settings), to be stored in a NEON_PROJECT_ID secret
  • Your API key (available by clicking on your user icon > Account > Developer settings), to be stored under NEON_API_KEY

You can learn more about Neon API keys in their Manage API Keys documentation.

Configure Github Environments and Secrets

Now you need to set the configuration values listed above as +repository secrets +on your project's GitHub repository. +Learn how to use environments for deployment in GitHub's documentation.

Configure the GitHub Environments for your repository to have:

  • production secrets, available only to the main branch:
    • NEON_DB_URL_PRODUCTION
  • previews secrets available to all branches:
    • NEON_PROJECT_ID
    • NEON_API_KEY

Configure the main branch workflow

Replace the contents of your app's workflow for static workspace deployment:

.github/workflows/platformatic-static-workspace-deploy.yml
name: Deploy Platformatic application to the cloud
on:
push:
branches:
- main
paths-ignore:
- 'docs/**'
- '**.md'

jobs:
build_and_deploy:
environment:
name: production
permissions:
contents: read
runs-on: ubuntu-latest
steps:
- name: Checkout application project repository
uses: actions/checkout@v4
- name: npm install --omit=dev
run: npm install --omit=dev
- name: Deploy project
uses: platformatic/onestep@latest
with:
github_token: ${{ secrets.GITHUB_TOKEN }}
platformatic_workspace_id: <YOUR_STATIC_WORKSPACE_ID>
platformatic_workspace_key: ${{ secrets.PLATFORMATIC_STATIC_WORKSPACE_API_KEY }}
platformatic_config_path: ./platformatic.db.json
secrets: DATABASE_URL
env:
DATABASE_URL: ${{ secrets.NEON_DB_URL_PRODUCTION }}
PLT_SERVER_LOGGER_LEVEL: info
PORT: 3042
PLT_SERVER_HOSTNAME: 127.0.0.1

Replace <YOUR_STATIC_WORKSPACE_ID> with the workspace ID that you previously had in this file.

When your app is deployed to the static workspace it will now be configured to connect to the +main branch database for your Neon project.

Configure the preview environment workflow

Neon allows up to 10 database branches on their free tier. You can automatically create a new +database branch when a pull request is opened, and then automatically remove it when the pull +request is merged.

GitHub Action to create a preview environment

Replace the contents of your app's workflow for dynamic workspace deployment:

.github/workflows/platformatic-dynamic-workspace-deploy.yml
name: Deploy to Platformatic cloud
on:
pull_request:
paths-ignore:
- 'docs/**'
- '**.md'

# This allows a subsequently queued workflow run to interrupt previous runs
concurrency:
group: "${{ github.workflow }} @ ${{ github.event.pull_request.head.label || github.head_ref || github.ref }}"
cancel-in-progress: true

jobs:
build_and_deploy:
runs-on: ubuntu-latest
environment:
name: development
steps:
- name: Checkout application project repository
uses: actions/checkout@v4
- name: npm install --omit=dev
run: npm install --omit=dev
- name: Get PR number
id: get_pull_number
run: |
pull_number=$(jq --raw-output .pull_request.number "$GITHUB_EVENT_PATH")
echo "pull_number=${pull_number}" >> $GITHUB_OUTPUT
echo $pull_number
- uses: neondatabase/create-branch-action@v4
with:
project_id: ${{ secrets.NEON_PROJECT_ID }}
branch_name: pr-${{ steps.get_pull_number.outputs.pull_number }}
api_key: ${{ secrets.NEON_API_KEY }}
id: create-branch
- name: Deploy project
uses: platformatic/onestep@latest
with:
github_token: ${{ secrets.GITHUB_TOKEN }}
platformatic_workspace_id: ${{ secrets.PLATFORMATIC_DYNAMIC_WORKSPACE_ID }}
platformatic_workspace_key: ${{ secrets.PLATFORMATIC_DYNAMIC_WORKSPACE_KEY }}
platformatic_config_path: ./platformatic.db.json
env:
DATABASE_URL: ${{ steps.create-branch.outputs.db_url }}
PLT_SERVER_LOGGER_LEVEL: info
PORT: 3042
PLT_SERVER_HOSTNAME: 127.0.0.1

Replace <YOUR_DYNAMIC_WORKSPACE_ID> with the workspace ID that you previously had in this file.

Configure preview environment cleanup

After a pull request to the main branch is merged, you should remove the matching database branch.

Create a new file, .github/workflows/cleanup-neon-branch-db.yml, and copy and paste in the following +workflow configuration:

.github/workflows/cleanup-neon-branch-db.yml
name: Cleanup Neon Database Branch
on:
push:
branches:
- 'main'
jobs:
delete-branch:
environment:
name: development
permissions: write-all
runs-on: ubuntu-latest
steps:
- name: Get PR info
id: get-pr-info
uses: actions-ecosystem/action-get-merged-pull-request@v1.0.1
with:
github_token: ${{secrets.GITHUB_TOKEN}}
- run: |
echo ${{ steps.get-pr-info.outputs.number}}
- name: Delete Neon Branch
if: ${{ steps.get-pr-info.outputs.number }}
uses: neondatabase/delete-branch-action@v3
with:
project_id: ${{ secrets.NEON_PROJECT_ID }}
branch: pr-${{ steps.get-pr-info.outputs.number }}
api_key: ${{ secrets.NEON_API_KEY }}

Deployment

To deploy these changes to your app:

  1. Create a Git branch locally (git checkout -b <BRANCH_NAME>)
  2. Commit your changes and push them to GitHub
  3. Open a pull request on GitHub - a branch will automatically be created for your Neon database and a preview app will be deployed to Platformatic Cloud (in your app's dynamic workspace).
  4. Merge the pull request - the Neon database branch will be automatically deleted and your app will be deployed to Platformatic Cloud (in your app's static workspace).
+ + + + \ No newline at end of file diff --git a/docs/next/platformatic-cloud/pricing/index.html b/docs/next/platformatic-cloud/pricing/index.html new file mode 100644 index 00000000000..e748b4a9e12 --- /dev/null +++ b/docs/next/platformatic-cloud/pricing/index.html @@ -0,0 +1,23 @@ + + + + + +Platformatic Cloud Pricing | Platformatic Open Source Software + + + + + +
+
Version: Next

Platformatic Cloud Pricing

Find the plan that works best for you!

FreeBasicAdvancedPro
Pricing$0$4.99$22.45$49.99
Slots01512
CNAME-truetruetrue
Always On-truetruetrue

FAQ

What is a slot?

One slot is equal to one compute unit. The free plan has no always-on +machines and they will be stopped while not in use.

What is a workspace?

A workspace is the security boundary of your deployment. You will use +the same credentials to deploy to one.

A workspace can be either static or dynamic. A static workspace always deploys to the same domain, while in a dynamic workspace each deployment will have its own domain. The latter are useful for providing pull request previews.

Can I change or upgrade my plan after I start using Platformatic?

Plans can be changed or upgraded at any time.

What does it mean I can set my own CNAME?

Free applications only get a *.deploy.space domain name to access their application. All other plans can set it to a domain of their choosing.

+ + + + \ No newline at end of file diff --git a/docs/next/platformatic-cloud/quick-start-guide/index.html b/docs/next/platformatic-cloud/quick-start-guide/index.html new file mode 100644 index 00000000000..55df824e221 --- /dev/null +++ b/docs/next/platformatic-cloud/quick-start-guide/index.html @@ -0,0 +1,45 @@ + + + + + +Cloud Quick Start Guide | Platformatic Open Source Software + + + + + +
+
Version: Next

Cloud Quick Start Guide

This guide shows you how to create and deploy an application to +Platformatic Cloud.

Prerequisites

To follow along with this guide you'll need to have these things installed:

You will also need to have a GitHub account.

Log in to Platformatic Cloud

Go to the Platformatic Cloud website and click on the +Continue with GitHub button. You'll be transferred to a GitHub page that +asks you to Authorize Platformatic Cloud. To continue, click on the +Authorize platformatic button.

Screenshot of Continue with GitHub button

On the Platformatic Cloud Service Agreements page, check the boxes and +click the Continue button. You'll then be redirected to your Cloud Dashboard page.

Create a Cloud app

Screenshot of an empty Apps page

Click the Create an app now button on your Cloud Dashboard page.

Enter quick-start-app as your application name. Click the Create Application button.

Create a static app workspace

Enter production as the name for your workspace. Then click on the Create Workspace button.

On the next page you'll see the Workspace ID and API key for your app workspace. +Copy them and store them somewhere secure for future reference, for example in a password manager app. +The API key will be used to deploy your app to the workspace that you've just created.

Click on the Back to dashboard button.

Create a dynamic app workspace

On your Cloud Dashboard, click on your app, then click on Create Workspace in the Workspaces +sidebar.

Screenshot of the create app workspace screen

The Dynamic Workspace option will be automatically enabled as you have already created a +static workspace. Dynamic workspaces can be used to deploy preview applications for GitHub +pull requests.

Enter development as the name for your workspace, then click on the Create Workspace button. +Copy the Workspace ID and API key and store them somewhere secure.

Create a GitHub repository

Go to the Create a new repository page on GitHub. +Enter quick-start-app as the Repository name for your new repository. +Click on the Add a README file checkbox and click the Create repository +button.

Add the workspace API keys as repository secrets

Go to the Settings tab on your app's GitHub repository. Click into the +Secrets and variables > Actions section and add the following secrets:

NameSecret
PLATFORMATIC_STATIC_WORKSPACE_IDYour app's static workspace ID
PLATFORMATIC_STATIC_WORKSPACE_API_KEYYour app's static workspace API key
PLATFORMATIC_DYNAMIC_WORKSPACE_IDYour app's dynamic workspace ID
PLATFORMATIC_DYNAMIC_WORKSPACE_API_KEYYour app's dynamic workspace API key

Click on the New repository secret button to add a secret.

tip

You can also use the GitHub CLI to set secrets on your GitHub repository, for example:

gh secret set \
--app actions \
--env-file <FILENAME_OF_ENV_FILE_WITH_SECRETS> \
--repos <YOUR_GITHUB_USERNAME>/<REPO_NAME>

Create a new Platformatic app

In your terminal, use Git to clone your repository from GitHub. For example:

git clone git@github.com:username/quick-start-app.git
tip

See the GitHub documentation for help with +Cloning a repository.

Now change into the project directory:

cd quick-start-app

Now run this command to start the Platformatic creator wizard:

npm create platformatic@latest

This interactive command-line tool will ask you some questions about how you'd +like to set up your new Platformatic app. For this guide, select these options:

- Which kind of project do you want to create?     => DB
- Where would you like to create your project? => .
- Do you want to create default migrations? => yes
- Do you want to create a plugin? => yes
- Do you want to use TypeScript? => no
- Do you want to overwrite the existing README.md? => yes
- Do you want to run npm install? => yes (this can take a while)
- Do you want to apply the migrations? => yes
- Do you want to generate types? => yes
- Do you want to create the github action to deploy this application to Platformatic Cloud dynamic workspace? => yes
- Do you want to create the github action to deploy this application to Platformatic Cloud static workspace? => yes

Copy and paste your dynamic and static workspace IDs when prompted by the creator wizard.

Once the wizard is complete, you'll have a Platformatic app project in the +quick-start-app directory, with example migration files and a plugin script.

Deploy the app

In your project directory, commit your application with Git:

git add .

git commit -m "Add Platformatic app"

Now push your changes up to GitHub:

git push origin main

On the GitHub repository page in your browser click on the Actions tab. +You should now see the Platformatic Cloud deployment workflow running.

Test the deployed app

Screenshot of a static app workspace that has had an app deployed to it

Once the GitHub Actions deployment workflow has completed, go to the production workspace +for your app in Platformatic Cloud. Click on the link for the Entry Point. You should now +see the Platformatic DB app home page.

Click on the OpenAPI Documentation link to try out your app's REST API using the Swagger UI.

Screenshot of Swagger UI for a Platformatic DB app

Preview pull request changes

When a pull request is opened on your project's GitHub repository, a preview app will automatically +be deployed to your app's dynamic workspace.

To see a preview app in action, create a new Git branch:

git checkout -b add-hello-endpoint

Then open up your app's plugin.js file in your code editor. Add the following code inside +the existing empty function:

app.get('/hello', async function(request, reply) {
return { hello: 'from Platformatic Cloud' }
})

Save the changes, then commit and push them up to GitHub:

git add plugin.js

git commit -m "Add hello endpoint"

git push -u origin add-hello-endpoint

Now create a pull request for your changes on GitHub. At the bottom of the +pull request page you'll see that a deployment has been triggered to your +app's dynamic workspace.

Screenshot of checks on a GitHub pull request

Once the deployment has completed, a comment will appear on your pull request +with a link to the preview app.

Screenshot of a deployed preview app comment on a GitHub pull request

Click on the Application URL link. If you add /hello on to the URL, +you should receive a response from the endpoint that you just added to +your application.

Screenshot of a JSON response from an API endpoint

+ + + + \ No newline at end of file diff --git a/docs/next/reference/cli/index.html b/docs/next/reference/cli/index.html new file mode 100644 index 00000000000..dd6e3747765 --- /dev/null +++ b/docs/next/reference/cli/index.html @@ -0,0 +1,44 @@ + + + + + +Platformatic CLI | Platformatic Open Source Software + + + + + +
+
Version: Next

Platformatic CLI

Installation and usage

Install the Platformatic CLI as a dependency for your project:

npm install platformatic

Once it's installed you can run it with:

npx platformatic
info

The platformatic package can be installed globally, but installing it as a +project dependency ensures that everyone working on the project is using the +same version of the Platformatic CLI.

Commands

The Platformatic CLI provides the following commands:

help

Welcome to Platformatic. Available commands are:

  • help - display this message.
  • help <command> - show more information about a command.
  • db - start Platformatic DB; type platformatic db help to know more.
  • service - start Platformatic Service; type platformatic service help to know more.
  • upgrade - upgrade the Platformatic configuration to the latest version.
  • gh - create a new gh action for Platformatic deployments.
  • deploy - deploy a Platformatic application to the cloud.
  • runtime - start Platformatic Runtime; type platformatic runtime help to know more.
  • start - start a Platformatic application.
  • frontend - create frontend code to consume the REST APIs.

compile

Compile all typescript plugins.

  $ platformatic compile

This command will compile the TypeScript plugins for each platformatic application.

deploy

Deploys an application to the Platformatic Cloud.

 $ platformatic deploy

Options:

  • -t, --type static/dynamic - The type of the workspace.
  • -c, --config FILE - Specify a configuration file to use.
  • -k, --keys FILE - Specify a path to the workspace keys file.
  • -l --label TEXT - The deploy label. Only for dynamic workspaces.
  • -e --env FILE - The environment file to use. Default: ".env"
  • -s --secrets FILE - The secrets file to use. Default: ".secrets.env"
  • --workspace-id uuid - The workspace id where the application will be deployed.
  • --workspace-key TEXT - The workspace key where the application will be deployed.
  1. To deploy a Platformatic application to the cloud, you should go to the Platformatic cloud dashboard and create a workspace.
  2. Once you have created a workspace, retrieve your workspace id and key from the workspace settings page. Optionally, you can download the provided workspace env file, which you can use with the --keys option.

ℹ️

When deploying an application to a dynamic workspace, specify the deploy --label option. You can find it on your cloud dashboard or you can specify a new one.

gh

Creates a gh action to deploy platformatic services on workspaces.

 $ platformatic gh -t dynamic

Options:

  • -w --workspace ID - The workspace ID where the service will be deployed.
  • -t, --type static/dynamic - The type of the workspace. Defaults to static.
  • -c, --config FILE - Specify a configuration file to use.
  • -b, --build - Build the service before deploying (npm run build).

If not specified, the configuration will be loaded from any of the following, in the current directory.

  • platformatic.db.json, or
  • platformatic.db.yml, or
  • platformatic.db.tml, or
  • platformatic.service.json, or
  • platformatic.service.yml, or
  • platformatic.service.tml

You can find more details about the configuration format here:

start

Start a Platformatic application with the following command:

$ platformatic start

Options:

  • -c, --config <path> - Path to the configuration file.
  • --inspect[=[host:]port] - Start the Node.js debugger. host defaults to '127.0.0.1'. port defaults to 9229. Use caution when binding to a public host:port combination.
  • --inspect-brk[=[host:]port] - Start the Node.js debugger and block until a client has attached. host defaults to '127.0.0.1'. port defaults to 9229. Use caution when binding to a public host:port combination.

upgrade

Upgrade the Platformatic schema configuration to the latest version.

 $ platformatic upgrade

Options:

  • -c, --config FILE - Specify a schema configuration file to use.

If not specified, the configuration will be loaded from any of the following, in the current directory.

  • platformatic.db.json, or
  • platformatic.db.yml, or
  • platformatic.db.tml, or
  • platformatic.service.json, or
  • platformatic.service.yml, or
  • platformatic.service.tml

You can find more details about the configuration format here:

client

platformatic client <command>

help

Create a Fastify plugin that exposes a client for a remote OpenAPI or GraphQL API.

To create a client for a remote OpenAPI API, you can use the following command:

$ platformatic client http://example.com/to/schema/file -n myclient

To create a client for a remote Graphql API, you can use the following command:

$ platformatic client http://example.com/graphql -n myclient

Instead of a URL, you can also use a local file:

$ platformatic client path/to/schema -n myclient

This will create a Fastify plugin that exposes a client for the remote API in a folder myclient +and a file named myclient.js inside it.

If platformatic config file is specified, it will be edited and a clients section will be added. +Then, in any part of your Platformatic application you can use the client.

You can use the client in your application in Javascript, calling a GraphQL endpoint:

module.exports = async function (app, opts) {
app.post('/', async (request, reply) => {
const res = await app.myclient.graphql({
query: 'query { hello }'
})
return res
})
}

or in Typescript, calling an OpenAPI endpoint:

import { FastifyInstance } from 'fastify'
/// <reference path="./myclient" />

export default async function (app: FastifyInstance) {
app.get('/', async () => {
return app.myclient.get({})
})
}

Options:

  • -c, --config <path> - Path to the configuration file.
  • -n, --name <name> - Name of the client.
  • -f, --folder <name> - Name of the plugin folder, defaults to --name value.
  • -t, --typescript - Generate the client plugin in TypeScript.
  • --full-response - Client will return full response object rather than just the body.
  • --full-request - Client will be called with all parameters wrapped in body, headers and query properties.
  • --full - Enables both --full-request and --full-response overriding them.
  • --optional-headers <headers> - Comma separated string of headers that will be marked as optional in the type file
  • --validate-response - If set, will validate the response body against the schema.

composer

platformatic composer <command>

help

Available commands:

  • help - show this help message.
  • help <command> - shows more information about a command.
  • start - start the server.
  • openapi schemas fetch - fetch OpenAPI schemas from services.

openapi schemas fetch

Fetch OpenAPI schemas from remote services to use in your Platformatic project.

  $ platformatic composer openapi schemas fetch

It will fetch all the schemas from the remote services and store them by path +set in the platformatic.composer.json file. If the path is not set, it will +skip fetching the schema.

start

Start the Platformatic Composer server with the following command:

 $ platformatic composer start

You will need a configuration file. Here is an example to get you started, +save the following as platformatic.composer.json:

  {
"server": {
"hostname": "127.0.0.1",
"port": 0,
"logger": {
"level": "info"
}
},
"composer": {
"services": [
{
"id": "service1",
"origin": "http://127.0.0.1:3051",
"openapi": {
"url": "/documentation/json"
}
},
{
"id": "service2",
"origin": "http://127.0.0.1:3052",
"openapi": {
"file": "./schemas/service2.openapi.json"
}
}
],
"refreshTimeout": 1000
}
}

By sending the SIGUSR2 signal, the server can be reloaded.

Options:

  • -c, --config FILE - Specify a configuration file to use.

If not specified, the configuration will be loaded from any of the following, in the current directory.

  • platformatic.composer.json, or
  • platformatic.composer.yml, or
  • platformatic.composer.tml

You can find more details about the configuration format here:

db

platformatic db <command>

compile

Compile typescript plugins.

  $ platformatic db compile

As a result of executing this command, the Platformatic DB will compile typescript +plugins in the outDir directory.

If not specified, the configuration will be loaded from any of the following, in the current directory.

  • platformatic.db.json, or
  • platformatic.db.yml, or
  • platformatic.db.tml

You can find more details about the configuration format here:

help

Available commands:

  • help - show this help message.
  • help <command> - shows more information about a command.
  • start - start the server.
  • compile - compile typescript plugins.
  • seed - run a seed file.
  • types - generate typescript types for entities.
  • schema - generate and print api schema.
  • migrations create - generate do and undo migration files.
  • migrations apply - apply migration files.

migrations apply

Apply all configured migrations to the database:

  $ platformatic db migrations apply

The migrations will be applied in the order they are specified in the +folder defined in the configuration file. If you want to apply a specific migration, +you can use the --to option:

  $ platformatic db migrations apply --to 001

Here is an example migration:

  CREATE TABLE graphs (
id SERIAL PRIMARY KEY,
name TEXT
);

You can always rollback to a specific migration with:

  $ platformatic db migrations apply --to VERSION

Use 000 to reset to the initial state.

Options:

  • -c, --config <path> - Path to the configuration file.
  • -t, --to <version> - Migrate to a specific version.

If not specified, the configuration will be loaded from any of the following, in the current directory.

  • platformatic.db.json, or
  • platformatic.db.yml, or
  • platformatic.db.tml

You can find more details about the configuration format here:

migrations create

Create next migration files.

  $ platformatic db migrations create

It will generate do and undo sql files in the migrations folder. The name of the +files will be the next migration number.

  $ platformatic db migrations create --name "create_users_table"

Options:

  • -c, --config <path> - Path to the configuration file.

If not specified, the configuration will be loaded from any of the following, in the current directory.

  • platformatic.db.json, or
  • platformatic.db.yml, or
  • platformatic.db.tml

You can find more details about the configuration format here:

migrations

Available commands:

  • migrations create - generate do and undo migration files.
  • migrations apply - apply migration files.

schema

Update the config schema file:

  • schema config - update the JSON schema config available on platformatic.db.schema.json

Your configuration on platformatic.db.json has a schema defined to improve the developer experience and avoid mistakes when updating the configuration of Platformatic DB. +When you run platformatic db init, a new JSON $schema property is added in platformatic.db.schema.json. This can allow your IDE to add suggestions (f.e. mandatory/missing fields, types, default values) by opening the config in platformatic.db.json. +Running platformatic db schema config you can update your schema so that it matches well the latest changes available on your config.

Generate a schema from the database and prints it to standard output:

  • schema graphql - generate the GraphQL schema
  • schema openapi - generate the OpenAPI schema

Options:

  • -c, --config FILE - Specify a configuration file to use.

If not specified, the configuration will be loaded from any of the following, in the current directory.

  • platformatic.db.json, or
  • platformatic.db.yml, or
  • platformatic.db.tml

You can find more details about the configuration format here:

seed

Load a seed into the database. This is a convenience method that loads +a JavaScript file and configure @platformatic/sql-mapper to connect to +the database specified in the configuration file.

Here is an example of a seed file:

  'use strict'

module.exports = async function ({ entities, db, sql }) {
await entities.graph.save({ input: { name: 'Hello' } })
await db.query(sql`
INSERT INTO graphs (name) VALUES ('Hello 2');
`)
}

You can run this using the seed command:

  $ platformatic db seed seed.js

Options:

  • --config - Path to the configuration file.

If not specified, the configuration will be loaded from any of the following, in the current directory.

  • platformatic.db.json, or
  • platformatic.db.yml, or
  • platformatic.db.tml

You can find more details about the configuration format here:

start

Start the Platformatic DB server with the following command:

 $ platformatic db start

You will need a configuration file. Here is an example to get you started, +save the following as platformatic.db.json:

  {
"server": {
"hostname": "127.0.0.1",
"port": 0,
"logger": {
"level": "info"
}
},
"db": {
"connectionString": "sqlite://./db"
},
"migrations": {
"dir": "./migrations"
}
}

Remember to create a migration, run the db help migrate command to know more.

All outstanding migrations will be applied to the database unless the +migrations.autoApply configuration option is set to false.

By sending the SIGUSR2 signal, the server can be reloaded.

Options:

  • -c, --config FILE - Specify a configuration file to use.

If not specified, the configuration will be loaded from any of the following, in the current directory.

  • platformatic.db.json, or
  • platformatic.db.yml, or
  • platformatic.db.tml

You can find more details about the configuration format here:

types

Generate typescript types for your entities from the database.

  $ platformatic db types

As a result of executing this command, the Platformatic DB will generate a types +folder with a typescript file for each database entity. It will also generate a +global.d.ts file that injects the types into the Application instance.

In order to add type support to your plugins, you need to install some additional +dependencies. To do this, copy and run an npm install command with dependencies +that "platformatic db types" will ask you.

Here is an example of a platformatic plugin.js with jsdoc support. +You can use it to add autocomplete to your code.

/// <reference path="./global.d.ts" />
'use strict'

/** @param {import('fastify').FastifyInstance} app */
module.exports = async function (app) {
app.get('/movie', async () => {
const movies = await app.platformatic.entities.movie.find({
where: { title: { eq: 'The Hitchhiker\'s Guide to the Galaxy' } }
})
return movies[0].id
})
}

If not specified, the configuration will be loaded from any of the following, in the current directory.

  • platformatic.db.json, or
  • platformatic.db.yml, or
  • platformatic.db.tml

You can find more details about the configuration format here:

service

platformatic service <command>

compile

Compile typescript plugins.

  $ platformatic service compile

As a result of executing this command, Platformatic Service will compile typescript +plugins in the outDir directory.

If not specified, the configuration will be loaded from any of the following, in the current directory.

  • platformatic.service.json, or
  • platformatic.service.yml, or
  • platformatic.service.tml

You can find more details about the configuration format here:

help

Available commands:

  • help - show this help message.
  • help <command> - shows more information about a command.
  • start - start the server.
  • schema config - generate the schema configuration file.

schema

Update the config schema file:

  • schema config - update the JSON schema config available on platformatic.service.schema.json

Your configuration in platformatic.service.json has a schema defined to improve the developer experience and avoid mistakes when updating the configuration of Platformatic Service. When you initialize a new Platformatic service (e.g. by running npm create platformatic@latest), a new JSON $schema property is added to the platformatic.service.json config. This allows your IDE to add suggestions (e.g. mandatory/missing fields, types, default values) when you open the config in platformatic.service.json. Running platformatic service schema config updates your schema so that it matches the latest changes available in your config.

start

Start the Platformatic Service with the following command:

 $ platformatic service start

You will need a configuration file. Here is an example to get you started, +save the following as platformatic.service.json:

{
"server": {
"hostname": "127.0.0.1",
"port": 0,
"logger": {
"level": "info"
}
},
"plugin": {
"path": "./plugin.js"
}
}

frontend

platformatic frontend <url> <language>

Create frontend code to consume the REST APIs of a Platformatic application.

From the directory you want the frontend code to be generated (typically <YOUR_FRONTEND_APP_DIRECTORY>/src/) run -

npx platformatic frontend http://127.0.0.1:3042 ts

ℹ️

Where http://127.0.0.1:3042 must be replaced with your Platformatic application endpoint, and the language can either be ts or js. When the command is run, the Platformatic CLI generates -

  • api.d.ts - A TypeScript module that includes all the OpenAPI-related types.
  • api.ts or api.js - A module that includes a function for every single REST endpoint.

If you use the --name option it will create custom file names.

npx platformatic frontend http://127.0.0.1:3042 ts --name foobar

Will create foobar.ts and foobar-types.d.ts

Refer to the dedicated guide where the full process of generating and consuming the frontend code is described.

In case of problems, please check that:

  • The Platformatic app URL is valid.
  • The Platformatic app that the URL belongs to must be up and running.
  • OpenAPI must be enabled (db.openapi in your platformatic.db.json is not set to false). You can find more details about the db configuration format here.
  • CORS must be managed in your Platformatic app (server.cors.origin.regexp in your platformatic.db.json is set to /*/, for instance). You can find more details about the cors configuration here.

runtime

platformatic runtime <command>

compile

Compile all typescript plugins for all services.

  $ platformatic runtime compile

This command will compile the TypeScript plugins for each service registered in the runtime.

help

Available commands:

  • help - show this help message.
  • help <command> - shows more information about a command.
  • start - start the application.

start

Start the Platformatic Runtime with the following command:

 $ platformatic runtime start

start

Start a Platformatic application with the following command:

$ platformatic start

Options:

  • -c, --config <path> - Path to the configuration file.
  • --inspect[=[host:]port] - Start the Node.js debugger. host defaults to '127.0.0.1'. port defaults to 9229. Use caution when binding to a public host:port combination.
  • --inspect-brk[=[host:]port] - Start the Node.js debugger and block until a client has attached. host defaults to '127.0.0.1'. port defaults to 9229. Use caution when binding to a public host:port combination.
+ + + + \ No newline at end of file diff --git a/docs/next/reference/client/frontend/index.html b/docs/next/reference/client/frontend/index.html new file mode 100644 index 00000000000..454b2751dbd --- /dev/null +++ b/docs/next/reference/client/frontend/index.html @@ -0,0 +1,17 @@ + + + + + +Frontend client | Platformatic Open Source Software + + + + + +
+
Version: Next

Frontend client

Create implementation and type files that exposes a client for a remote OpenAPI server, that uses fetch and can run in any browser.

To create a client for a remote OpenAPI API, you can use the following command:

$ platformatic frontend http://example.com/to/schema/file <language> --name <clientname>

where <language> can be either js or ts.

This will create two files clientname.js (or clientname.ts) and clientname-types.d.ts for types.

clientname by default is api

Usage

The implementation generated by the tool exports all the named operations found and a factory object.

Named operations

import { setBaseUrl, getMovies } from './api.js'

setBaseUrl('http://my-server-url.com') // modifies the global `baseUrl` variable

const movies = await getMovies({})
console.log(movies)

Factory

The factory object is called build and can be used like this

import build from './api.js'

const client = build('http://my-server-url.com')

const movies = await client.getMovies({})
console.log(movies)

You can use both named operations and the factory in the same file. They can work on different hosts, so the factory does not use the global setBaseUrl function.

Generated Code

The type file will look like this

export interface GetMoviesRequest {
'limit'?: number;
'offset'?: number;
// ... all other options
}

interface GetMoviesResponseOK {
'id': number;
'title': string;
}
export interface Api {
setBaseUrl(newUrl: string) : void;
getMovies(req: GetMoviesRequest): Promise<Array<GetMoviesResponseOK>>;
// ... all operations listed here
}

type PlatformaticFrontendClient = Omit<Api, 'setBaseUrl'>
export default function build(url: string): PlatformaticFrontendClient

The javascript implementation will look like this

let baseUrl = ''
/** @type {import('./api-types.d.ts').Api['setBaseUrl']} */
export const setBaseUrl = (newUrl) => { baseUrl = newUrl }

/** @type {import('./api-types.d.ts').Api['getMovies']} */
export const getMovies = async (request) => {
return await _getMovies(baseUrl, request)
}
async function _createMovie (url, request) {
const response = await fetch(`${url}/movies/`, {
method:'post',
body: JSON.stringify(request),
headers: {
'Content-Type': 'application/json'
}
})

if (!response.ok) {
throw new Error(await response.text())
}

return await response.json()
}

/** @type {import('./api-types.d.ts').Api['createMovie']} */
export const createMovie = async (request) => {
return await _createMovie(baseUrl, request)
}
// ...

export default function build (url) {
return {
getMovies: _getMovies.bind(url, ...arguments),
// ...
}
}

The typescript implementation will look like this

import type { Api } from './api-types'
import * as Types from './api-types'

let baseUrl = ''
export const setBaseUrl = (newUrl: string) : void => { baseUrl = newUrl }

const _getMovies = async (url: string, request: Types.GetMoviesRequest) => {
const response = await fetch(`${url}/movies/?${new URLSearchParams(Object.entries(request || {})).toString()}`)

if (!response.ok) {
throw new Error(await response.text())
}

return await response.json()
}

export const getMovies: Api['getMovies'] = async (request: Types.GetMoviesRequest) => {
return await _getMovies(baseUrl, request)
}
// ...
export default function build (url) {
return {
getMovies: _getMovies.bind(url, ...arguments),
// ...
}
}
+ + + + \ No newline at end of file diff --git a/docs/next/reference/client/introduction/index.html b/docs/next/reference/client/introduction/index.html new file mode 100644 index 00000000000..149ae423413 --- /dev/null +++ b/docs/next/reference/client/introduction/index.html @@ -0,0 +1,34 @@ + + + + + +Platformatic Client | Platformatic Open Source Software + + + + + +
+
Version: Next

Platformatic Client

Create a Fastify plugin that exposes a client for a remote OpenAPI or GraphQL API.

To create a client for a remote OpenAPI API, you can use the following command:

$ platformatic client http://example.com/to/schema/file --name myclient

To create a client for a remote GraphQL API, you can use the following command:

$ platformatic client http://example.com/graphql --name myclient

Usage with Platformatic Service or Platformatic DB

If you run the generator in a Platformatic application, it will automatically extend it to load your client by editing the configuration file and adding a clients section. Then, in any part of your Platformatic application you can use the client.

You can use the client in your application in Javascript, calling a GraphQL endpoint:

// Use a typescript reference to set up autocompletion
// and explore the generated APIs.

/// <reference path="./myclient" />

/** @type {import('fastify').FastifyPluginAsync<{} */
module.exports = async function (app, opts) {
app.post('/', async (request, reply) => {
const res = await app.myclient.graphql({
query: 'query { movies { title } }'
})
return res
})
}

or in Typescript, calling an OpenAPI endpoint:

import { FastifyInstance } from 'fastify'
/// <reference path="./myclient" />

export default async function (app: FastifyInstance) {
app.get('/', async () => {
return app.myclient.get({})
})
}

The client configuration in the platformatic.db.json and platformatic.service.json would look like:

{
"clients": [{
"schema": "./myclient/myclient.openapi.json" // or ./myclient/myclient.schema.graphl
"name": "myclient",
"type": "openapi" // or graphql
"url": "{ PLT_MYCLIENT_URL }"
}]
}

Note that the generator would also have updated the .env and .env.sample files if they exist.

Generating a client for a service running within Platformatic Runtime

Platformatic Runtime allows you to create a network of services that are not exposed. +To create a client to invoke one of those services from another, run:

$ platformatic client --name <clientname> --runtime <serviceId>

Where <clientname> is the name of the client and <serviceId> is the id of the given service (which in the basic case corresponds to the folder name of that service). The generated client is identical to the one in the previous section.

Note that this command looks for a platformatic.runtime.json in a parent directory.

Example

As an example, consider a network of three microservices:

  • somber-chariot, an instance of Platformatic DB;
  • languid-noblemen, an instance of Platformatic Service;
  • pricey-paesant, an instance of Platformatic Composer, which is also the runtime entrypoint.

From within the languid-noblemen folder, we can run:

$ platformatic client --name chariot --runtime somber-chariot

The client configuration in the platformatic.db.json and platformatic.service.json would look like:

{
"clients": [{
"path": "./chariot",
"serviceId": "somber-chariot"
}]
}

Even if the client is generated from an HTTP endpoint, it is possible to add a serviceId property to each client object shown above. This is not required, but if using the Platformatic Runtime, the serviceId property will be used to identify the service dependency.

Types Generator

The types for the client are automatically generated for both OpenAPI and GraphQL schemas.

You can generate only the types with the --types-only flag.

For example

$ platformatic client http://example.com/to/schema/file --name myclient --types-only

Will create the single myclient.d.ts file in current directory

OpenAPI

We provide a fully typed experience for OpenAPI, Typing both the request and response for +each individual OpenAPI operation.

Consider this example:

// Omitting all the individual Request and Response payloads for brevity

interface Client {
getMovies(req: GetMoviesRequest): Promise<Array<GetMoviesResponse>>;
createMovie(req: CreateMovieRequest): Promise<CreateMovieResponse>;
updateMovies(req: UpdateMoviesRequest): Promise<Array<UpdateMoviesResponse>>;
getMovieById(req: GetMovieByIdRequest): Promise<GetMovieByIdResponse>;
updateMovie(req: UpdateMovieRequest): Promise<UpdateMovieResponse>;
updateMovie(req: UpdateMovieRequest): Promise<UpdateMovieResponse>;
deleteMovies(req: DeleteMoviesRequest): Promise<DeleteMoviesResponse>;
getQuotesForMovie(req: GetQuotesForMovieRequest): Promise<Array<GetQuotesForMovieResponse>>;
getQuotes(req: GetQuotesRequest): Promise<Array<GetQuotesResponse>>;
createQuote(req: CreateQuoteRequest): Promise<CreateQuoteResponse>;
updateQuotes(req: UpdateQuotesRequest): Promise<Array<UpdateQuotesResponse>>;
getQuoteById(req: GetQuoteByIdRequest): Promise<GetQuoteByIdResponse>;
updateQuote(req: UpdateQuoteRequest): Promise<UpdateQuoteResponse>;
updateQuote(req: UpdateQuoteRequest): Promise<UpdateQuoteResponse>;
deleteQuotes(req: DeleteQuotesRequest): Promise<DeleteQuotesResponse>;
getMovieForQuote(req: GetMovieForQuoteRequest): Promise<GetMovieForQuoteResponse>;
}

type ClientPlugin = FastifyPluginAsync<NonNullable<client.ClientOptions>>

declare module 'fastify' {
interface FastifyInstance {
'client': Client;
}

interface FastifyRequest {
'client': Client;
}
}

declare namespace Client {
export interface ClientOptions {
url: string
}
export const client: ClientPlugin;
export { client as default };
}

declare function client(...params: Parameters<ClientPlugin>): ReturnType<ClientPlugin>;
export = client;

GraphQL

We provide a partially typed experience for GraphQL, because we do not want to limit +how you are going to query the remote system. Take a look at this example:

declare module 'fastify' {
interface GraphQLQueryOptions {
query: string;
headers: Record<string, string>;
variables: Record<string, unknown>;
}
interface GraphQLClient {
graphql<T>(GraphQLQuery): PromiseLike<T>;
}
interface FastifyInstance {
'client'
: GraphQLClient;

}

interface FastifyRequest {
'client'<T>(GraphQLQuery): PromiseLike<T>;
}
}

declare namespace client {
export interface Clientoptions {
url: string
}
export interface Movie {
'id'?: string;

'title'?: string;

'realeasedDate'?: string;

'createdAt'?: string;

'preferred'?: string;

'quotes'?: Array<Quote>;

}
export interface Quote {
'id'?: string;

'quote'?: string;

'likes'?: number;

'dislikes'?: number;

'movie'?: Movie;

}
export interface MoviesCount {
'total'?: number;

}
export interface QuotesCount {
'total'?: number;

}
export interface MovieDeleted {
'id'?: string;

}
export interface QuoteDeleted {
'id'?: string;

}
export const client: Clientplugin;
export { client as default };
}

declare function client(...params: Parameters<Clientplugin>): ReturnType<Clientplugin>;
export = client;

Given that only you can know what GraphQL query you are producing, you are responsible for typing it accordingly.

Usage with standalone Fastify

If a platformatic configuration file is not found, a complete Fastify plugin is generated to be +used in your Fastify application like so:

const fastify = require('fastify')()
const client = require('./your-client-name')

fastify.register(client, {
url: 'http://example.com'
})

// GraphQL
fastify.post('/', async (request, reply) => {
const res = await request.movies.graphql({
query: 'mutation { saveMovie(input: { title: "foo" }) { id, title } }'
})
return res
})

// OpenAPI
fastify.post('/', async (request, reply) => {
const res = await request.movies.createMovie({ title: 'foo' })
return res
})

fastify.listen({ port: 3000 })

Note that you would need to install @platformatic/client as a dependency.

How are the method names defined in OpenAPI

The names of the operations are defined in the OpenAPI specification. Specifically, we use the operationId. If that's not part of the spec, the name is generated by combining the parts of the path with the HTTP method; for example, the path /something/{param1}/ with the method GET generates getSomethingParam1.

Authentication

It's very common that downstream services require some form of authentication. How can we add the necessary headers? You can configure them from your plugin:

/// <reference path="./myclient" />

/** @type {import('fastify').FastifyPluginAsync<{} */
module.exports = async function (app, opts) {
app.configureMyclient({
async getHeaders (req, reply) {
return {
'foo': 'bar'
}
}
})

app.post('/', async (request, reply) => {
const res = await app.myclient.graphql({
query: 'query { movies { title } }'
})
return res
})
}

Telemetry propagation

To correctly propagate telemetry information, be sure to get the client from the request object, e.g.:

fastify.post('/', async (request, reply) => {
const res = await request.movies.createMovie({ title: 'foo' })
return res
})
+ + + + \ No newline at end of file diff --git a/docs/next/reference/client/programmatic/index.html b/docs/next/reference/client/programmatic/index.html new file mode 100644 index 00000000000..5f02c1f9b7d --- /dev/null +++ b/docs/next/reference/client/programmatic/index.html @@ -0,0 +1,17 @@ + + + + + +Programmatic API | Platformatic Open Source Software + + + + + +
+
Version: Next

Programmatic API

It is possible to use the Platformatic client without the generator.

OpenAPI Client

import { buildOpenAPIClient } from '@platformatic/client'

const client = await buildOpenAPIClient({
url: `https://yourapi.com/documentation/json`,
// path: 'path/to/openapi.json',
headers: {
'foo': 'bar'
}
})

const res = await client.yourOperationName({ foo: 'bar' })

console.log(res)

If you use Typescript you can take advantage of the generated types file

import { buildOpenAPIClient } from '@platformatic/client'
import Client from './client'
//
// interface Client {
// getMovies(req: GetMoviesRequest): Promise<Array<GetMoviesResponse>>;
// createMovie(req: CreateMovieRequest): Promise<CreateMovieResponse>;
// ...
// }
//

const client: Client = await buildOpenAPIClient<Client>({
url: `https://yourapi.com/documentation/json`,
// path: 'path/to/openapi.json',
headers: {
'foo': 'bar'
}
})

const res = await client.getMovies()
console.log(res)

GraphQL Client

import { buildGraphQLClient } from '@platformatic/client'

const client = await buildGraphQLClient({
url: `https://yourapi.com/graphql`,
headers: {
'foo': 'bar'
}
})

const res = await client.graphql({
query: `
mutation createMovie($title: String!) {
saveMovie(input: {title: $title}) {
id
title
}
}
`,
variables: {
title: 'The Matrix'
}
})

console.log(res)
+ + + + \ No newline at end of file diff --git a/docs/next/reference/composer/api-modification/index.html b/docs/next/reference/composer/api-modification/index.html new file mode 100644 index 00000000000..836a122bb29 --- /dev/null +++ b/docs/next/reference/composer/api-modification/index.html @@ -0,0 +1,19 @@ + + + + + +API modification | Platformatic Open Source Software + + + + + +
+
Version: Next

API modification

If you want to modify the automatically generated API, you can use the composer's custom onRoute hook.

addComposerOnRouteHook(openApiPath, methods, handler)

  • openApiPath (string) - A route OpenAPI path that Platformatic Composer takes from the OpenAPI specification.
  • methods (string[]) - Route HTTP methods that Platformatic Composer takes from the OpenAPI specification.
  • handler (function) - fastify onRoute hook handler.

onComposerResponse

onComposerResponse hook is called after the response is received from a composed service. +It might be useful if you want to modify the response before it is sent to the client. +If you want to use it you need to add onComposerResponse property to the config object of the route options.

  • request (object) - fastify request object.
  • reply (object) - fastify reply object.
  • body (object) - undici response body object.

Example

app.platformatic.addComposerOnRouteHook('/users/{id}', ['GET'], routeOptions => {
routeOptions.schema.response[200] = {
type: 'object',
properties: {
firstName: { type: 'string' },
lastName: { type: 'string' }
}
}

async function onComposerResponse (request, reply, body) {
const payload = await body.json()
const newPayload = {
firstName: payload.first_name,
lastName: payload.last_name
}
reply.send(newPayload)
}
routeOptions.config.onComposerResponse = onComposerResponse
})
+ + + + \ No newline at end of file diff --git a/docs/next/reference/composer/configuration/index.html b/docs/next/reference/composer/configuration/index.html new file mode 100644 index 00000000000..7734e12eeb3 --- /dev/null +++ b/docs/next/reference/composer/configuration/index.html @@ -0,0 +1,38 @@ + + + + + +Configuration | Platformatic Open Source Software + + + + + +
+
Version: Next

Configuration

Platformatic Composer is configured with a configuration file. It supports the use of environment variables as setting values with configuration placeholders.

Configuration file

If the Platformatic CLI finds a file in the current working directory matching +one of these filenames, it will automatically load it:

  • platformatic.composer.json
  • platformatic.composer.json5
  • platformatic.composer.yml or platformatic.composer.yaml
  • platformatic.composer.tml or platformatic.composer.toml

Alternatively, a --config option with a configuration +filepath can be passed to most platformatic composer CLI commands.

The configuration examples in this reference use JSON.

Supported formats

FormatExtensions
JSON.json
JSON5.json5
YAML.yml, .yaml
TOML.tml

Comments are supported by the JSON5, YAML and TOML file formats.

Settings

Configuration settings are organised into the following groups:

Sensitive configuration settings containing sensitive data should be set using configuration placeholders.

server

A required object with the following settings:

  • hostname (required, string) — Hostname where Platformatic Composer server will listen for connections.

  • port (required, number) — Port where Platformatic Composer server will listen for connections.

  • healthCheck (boolean or object) — Enables the health check endpoint.

    • Powered by @fastify/under-pressure.
    • The value can be an object, used to specify the interval between checks in milliseconds (default: 5000)

    Example

    {
    "server": {
    ...
    "healthCheck": {
    "interval": 2000
    }
    }
    }
  • cors (object) — Configuration for Cross-Origin Resource Sharing (CORS) headers.

    • All options will be passed to the @fastify/cors plugin. In order to specify a RegExp object, you can pass { regexp: 'yourregexp' }, +it will be automatically converted.
  • logger (object) -- the logger configuration.

  • pluginTimeout (integer) -- the number of milliseconds to wait for a Fastify plugin to load, see the fastify docs for more details.

  • https (object) - Configuration for HTTPS supporting the following options.

    • key (required, string, object, or array) - If key is a string, it specifies the private key to be used. If key is an object, it must have a path property specifying the private key file. Multiple keys are supported by passing an array of keys.
    • cert (required, string, object, or array) - If cert is a string, it specifies the certificate to be used. If cert is an object, it must have a path property specifying the certificate file. Multiple certificates are supported by passing an array of keys.

metrics

Configuration for a Prometheus server that will export monitoring metrics +for the current server instance. It uses fastify-metrics +under the hood.

This setting can be a boolean or an object. If set to true the Prometheus server will listen on http://0.0.0.0:9090.

Supported object properties:

  • hostname (string) — The hostname where Prometheus server will listen for connections.
  • port (number) — The port where Prometheus server will listen for connections.
  • auth (object) — Basic Auth configuration. username and password are required here +(use environment variables).

plugins

An optional object that defines the plugins loaded by Platformatic Composer.

  • paths (required, array): an array of paths (string) +or an array of objects composed as follows,

    • path (string): Relative path to plugin's entry point.
    • options (object): Optional plugin options.
    • encapsulate (boolean): if the path is a folder, it instructs Platformatic not to encapsulate those plugins.
    • maxDepth (integer): if the path is a folder, it limits the depth to load the content from.
  • typescript (boolean): enable typescript compilation. A tsconfig.json file is required in the same folder.

    Example

    {
    "plugins": {
    "paths": [{
    "path": "./my-plugin.js",
    "options": {
    "foo": "bar"
    }
    }]
    }
    }

watch

Disable watching for file changes if set to false. It can also be customized with the following options:

  • ignore (string[], default: null): List of glob patterns to ignore when watching for changes. If null or not specified, ignore rule is not applied. Ignore option doesn't work for typescript files.

  • allow (string[], default: ['*.js', '**/*.js']): List of glob patterns to allow when watching for changes. If null or not specified, allow rule is not applied. Allow option doesn't work for typescript files.

    Example

    {
    "watch": {
    "ignore": ["*.mjs", "**/*.mjs"],
    "allow": ["my-plugin.js", "plugins/*.js"]
    }
    }

composer

Configure @platformatic/composer specific settings such as services or refreshTimeout:

  • services (array, default: []) — is an array of objects that defines +the services managed by the composer. Each service object supports the following settings:

    • id (required, string) - A unique identifier for the service.
    • origin (string) - A service origin. Skip this option if the service is executing inside of Platformatic Runtime. In this case, service id will be used instead of origin.
    • openapi (required, object) - The configuration file used to compose OpenAPI specification. See the openapi for details.
    • proxy (object or false) - Service proxy configuration. If false, the service proxy is disabled.
      • prefix (required, string) - Service proxy prefix. All service routes will be prefixed with this value.
    • refreshTimeout (number) - The number of milliseconds to wait between checks for changes in the service OpenAPI specification. If not specified, the default value is 1000.

openapi

  • url (string) - A path of the route that exposes the OpenAPI specification. If a service is a Platformatic Service or Platformatic DB, use /documentation/json as a value. Use this or file option to specify the OpenAPI specification.
  • file (string) - A path to the OpenAPI specification file. Use this or url option to specify the OpenAPI specification.
  • prefix (string) - A prefix for the OpenAPI specification. All service routes will be prefixed with this value.
  • config (string) - A path to the OpenAPI configuration file. This file is used to customize the OpenAPI specification. See the openapi-configuration for details.
openapi-configuration

The OpenAPI configuration file is a JSON file that is used to customize the OpenAPI specification. It supports the following options:

  • ignore (boolean) - If true, the route will be ignored by the composer. +If you want to ignore a specific method, use the ignore option in the nested method object.

    Example

    {
    "paths": {
    "/users": {
    "ignore": true
    },
    "/users/{id}": {
    "get": { "ignore": true },
    "put": { "ignore": true }
    }
    }
    }
  • alias (string) - Use it to create an alias for the route path. The original route path will be ignored.

    Example

    {
    "paths": {
    "/users": {
    "alias": "/customers"
    }
    }
    }
  • rename (string) - Use it to rename composed route response fields. Use JSON Schema format to describe the response structure. For now it works only for the 200 response.

    Example

    {
    "paths": {
    "/users": {
    "responses": {
    "200": {
    "type": "array",
    "items": {
    "type": "object",
    "properties": {
    "id": { "rename": "user_id" },
    "name": { "rename": "first_name" }
    }
    }
    }
    }
    }
    }
    }

Examples

Composition of two remote services:

{
"composer": {
"services": [
{
"id": "auth-service",
"origin": "https://auth-service.com",
"openapi": {
"url": "/documentation/json",
"prefix": "auth"
}
},
{
"id": "payment-service",
"origin": "https://payment-service.com",
"openapi": {
"file": "./schemas/payment-service.json"
}
}
],
"refreshTimeout": 1000
}
}

Composition of two local services inside of Platformatic Runtime:

{
"composer": {
"services": [
{
"id": "auth-service",
"openapi": {
"url": "/documentation/json",
"prefix": "auth"
}
},
{
"id": "payment-service",
"openapi": {
"file": "./schemas/payment-service.json"
}
}
],
"refreshTimeout": 1000
}
}

telemetry

Open Telemetry is optionally supported with these settings:

  • serviceName (required, string) — Name of the service as will be reported in open telemetry.
  • version (string) — Optional version (free form)
  • skip (array). Optional list of operations to skip when exporting telemetry defined object with properties:
    • method: GET, POST, PUT, DELETE, PATCH, HEAD, OPTIONS, TRACE
    • path. e.g.: /documentation/json
  • exporter (object or array) — Exporter configuration. If not defined, the exporter defaults to console. If an array of objects is configured, every object must be a valid exporter object. The exporter object has the following properties:
    • type (string) — Exporter type. Supported values are console, otlp, zipkin and memory (default: console). memory is only supported for testing purposes.
    • options (object) — These options are supported:
      • url (string) — The URL to send the telemetry to. Required for otlp exporter. This has no effect on console and memory exporters.
      • headers (object) — Optional headers to send with the telemetry. This has no effect on console and memory exporters.

Note that OTLP traces can be consumed by different solutions, like Jaeger. Here the full list.

Example

{
"telemetry": {
"serviceName": "test-service",
"exporter": {
"type": "otlp",
"options": {
"url": "http://localhost:4318/v1/traces"
}
}
}
}

Environment variable placeholders

The value for any configuration setting can be replaced with an environment variable +by adding a placeholder in the configuration file, for example {PLT_SERVER_LOGGER_LEVEL}.

All placeholders in a configuration must be available as an environment variable +and must meet the allowed placeholder name rules.

Example

platformatic.service.json
{
"server": {
"port": "{PORT}"
}
}

Platformatic will replace the placeholders in this example with the environment +variables of the same name.

Setting environment variables

If a .env file exists it will automatically be loaded by Platformatic using +dotenv. For example:

.env
PLT_SERVER_LOGGER_LEVEL=info
PORT=8080

The .env file must be located in the same folder as the Platformatic configuration +file or in the current working directory.

Environment variables can also be set directly on the command line, for example:

PLT_SERVER_LOGGER_LEVEL=debug npx platformatic composer

Allowed placeholder names

Only placeholder names prefixed with PLT_, or that are in this allow list, will be +dynamically replaced in the configuration file:

  • PORT

This restriction is to avoid accidentally exposing system environment variables. +An error will be raised by Platformatic if it finds a configuration placeholder +that isn't allowed.

The default allow list can be extended by passing a --allow-env CLI option with a +comma separated list of strings, for example:

npx platformatic composer --allow-env=HOST,SERVER_LOGGER_LEVEL

If --allow-env is passed as an option to the CLI, it will be merged with the +default allow list.

+ + + + \ No newline at end of file diff --git a/docs/next/reference/composer/introduction/index.html b/docs/next/reference/composer/introduction/index.html new file mode 100644 index 00000000000..47bf91382e2 --- /dev/null +++ b/docs/next/reference/composer/introduction/index.html @@ -0,0 +1,22 @@ + + + + + +Platformatic Composer | Platformatic Open Source Software + + + + + +
+
Version: Next

Platformatic Composer

Platformatic Composer is an HTTP server that automatically aggregates multiple +services APIs into a single API.

info

Platformatic Composer is currently in public beta.

Features

Public beta

Platformatic Composer is in public beta. You can use it in production, but it's quite +likely that you'll encounter significant bugs.

If you run into a bug or have a suggestion for improvement, please +raise an issue on GitHub.

Standalone usage

If you're only interested in the features available in Platformatic Composer, you can replace platformatic with @platformatic/composer in the dependencies of your package.json, so that you'll import fewer deps.

Example configuration file

The following configuration file can be used to start a new Platformatic +Composer project. For more details on the configuration file, see the +configuration documentation.

{
"$schema": "https://platformatic.dev/schemas/v0.26.0/composer",
"server": {
"hostname": "127.0.0.1",
"port": 0,
"logger": {
"level": "info"
}
},
"composer": {
"services": [
{
"id": "auth-service",
"origin": "https://auth-service.com",
"openapi": {
"url": "/documentation/json",
"prefix": "auth"
}
},
{
"id": "payment-service",
"origin": "https://payment-service.com",
"openapi": {
"url": "/documentation/json"
}
}
],
"refreshTimeout": 1000
},
"watch": true
}
+ + + + \ No newline at end of file diff --git a/docs/next/reference/composer/plugin/index.html b/docs/next/reference/composer/plugin/index.html new file mode 100644 index 00000000000..03362c8323f --- /dev/null +++ b/docs/next/reference/composer/plugin/index.html @@ -0,0 +1,18 @@ + + + + + +Plugin | Platformatic Open Source Software + + + + + +
+
Version: Next

Plugin

If you want to add features to a service, you will need to register a plugin, which will be in the form of a standard Fastify plugin.

The config file will specify where the plugin file is located as the example below:

{
...
"plugins": {
"paths": ["./plugin/index.js"]
}
}

The path is relative to the config file path.

You should export an async function which receives the following parameters:

  • app (FastifyInstance) that is the main fastify instance
  • opts all the options specified in the config file after path

Hot Reload

Plugin file is being watched by fs.watch function.

You don't need to reload Platformatic Composer server while working on your plugin. Every time you save, the watcher will trigger a reload event and the server will auto-restart and load your updated code.

tip

At this time, on Linux, file watch in subdirectories is not supported due to a Node.js limitation (documented here).

Directories

The path can also be a directory. In that case, the directory will be loaded with @fastify/autoload.

Consider the following directory structure:

├── routes
│ ├── foo
│ │ ├── something.js
│ │ └── bar
│ │ └── baz.js
│ ├── single-plugin
│ │ └── utils.js
│ └── another-plugin.js
└── platformatic.composer.json

By default the folder will be added as a prefix to all the routes defined within them. +See the autoload documentation for all the options to customize this behavior.

Multiple plugins

Multiple plugins can be loaded in parallel by specifying an array:

{
...
"plugins": {
"paths": [{
"path": "./plugin/index.js"
}, {
"path": "./routes/"
}]
}
}
+ + + + \ No newline at end of file diff --git a/docs/next/reference/composer/programmatic/index.html b/docs/next/reference/composer/programmatic/index.html new file mode 100644 index 00000000000..14d58f8a4a6 --- /dev/null +++ b/docs/next/reference/composer/programmatic/index.html @@ -0,0 +1,18 @@ + + + + + +Programmatic API | Platformatic Open Source Software + + + + + +
+
Version: Next

Programmatic API

In many cases it's useful to start Platformatic Composer using an API instead of +command line, e.g. in tests we want to start and stop our server.

The buildServer function allows that:

import { buildServer } from '@platformatic/composer'

const app = await buildServer('path/to/platformatic.composer.json')
await app.start()

const res = await fetch(app.url)
console.log(await res.json())

// do something

await app.close()

It is also possible to customize the configuration:

import { buildServer } from '@platformatic/composer'

const app = await buildServer({
server: {
hostname: '127.0.0.1',
port: 0
},
services: [
{
id: 'auth-service',
origin: 'https://auth-service.com',
openapi: {
url: '/documentation/json',
prefix: 'auth'
}
},
{
id: 'payment-service',
origin: 'https://payment-service.com',
openapi: {
file: './schemas/payment-service.json'
}
}
]
})

await app.start()

const res = await fetch(app.url)
console.log(await res.json())

// do something

await app.close()
+ + + + \ No newline at end of file diff --git a/docs/next/reference/db/authorization/introduction/index.html b/docs/next/reference/db/authorization/introduction/index.html new file mode 100644 index 00000000000..f0163f8d616 --- /dev/null +++ b/docs/next/reference/db/authorization/introduction/index.html @@ -0,0 +1,21 @@ + + + + + +Authorization | Platformatic Open Source Software + + + + + +
+
Version: Next

Authorization

Introduction

Authorization in Platformatic DB is role-based. User authentication and the +assignment of roles must be handled by an external authentication service.

Configuration

Authorization strategies and rules are configured via a Platformatic DB +configuration file. See the Platformatic DB Configuration +documentation for the supported settings.

Bypass authorization in development

To make testing and developing easier, it's possible to bypass authorization checks +if an adminSecret is set. See the HTTP headers (development only) documentation.

+ + + + \ No newline at end of file diff --git a/docs/next/reference/db/authorization/rules/index.html b/docs/next/reference/db/authorization/rules/index.html new file mode 100644 index 00000000000..0df0e7a933f --- /dev/null +++ b/docs/next/reference/db/authorization/rules/index.html @@ -0,0 +1,28 @@ + + + + + +Rules | Platformatic Open Source Software + + + + + +
+
Version: Next

Rules

Introduction

Authorization rules can be defined to control what operations users are +able to execute via the REST or GraphQL APIs that are exposed by a Platformatic +DB app.

Every rule must specify:

  • role (required) — A role name. It's a string and must match with the role(s) set by an external authentication service.
  • entity (optional) — The Platformatic DB entity to apply this rule to.
  • entities (optional) — The Platformatic DB entities to apply this rule to.
  • defaults (optional) — Configure entity fields that will be +automatically set from user data.
  • One entry for each supported CRUD operation: find, save, delete

One of entity and entities must be specified.

Operation checks

Every entity operation — such as find, insert, save or delete — can have +authorization checks specified for them. This value can be false (operation disabled) +or true (operation enabled with no checks).

To specify more fine-grained authorization controls, add a checks field, e.g.:

{
"role": "user",
"entity": "page",
"find": {
"checks": {
"userId": "X-PLATFORMATIC-USER-ID"
}
},
...
}

In this example, when a user with a user role executes a findPage, they can +access all the data that has userId equal to the value in user metadata with +key X-PLATFORMATIC-USER-ID.

Note that "userId": "X-PLATFORMATIC-USER-ID" is syntactic sugar for:

      "find": {
"checks": {
"userId": {
"eq": "X-PLATFORMATIC-USER-ID"
}
}
}

It's possible to specify more complex rules using all the supported where clause operators.

Note that userId MUST exist as a field in the database table to use this feature.

GraphQL events and subscriptions

Platformatic DB supports GraphQL subscriptions and therefore db-authorization must protect them. +The check is performed based on the find permissions, the only permissions that are supported are:

  1. find: false, the subscription for that role is disabled
  2. find: { checks: { [prop]: 'X-PLATFORMATIC-PROP' } } validates that the given prop is equal
  3. find: { checks: { [prop]: { eq: 'X-PLATFORMATIC-PROP' } } } validates that the given prop is equal

Conflicting rules across roles for different equality checks will not be supported.

Restrict access to entity fields

If a fields array is present on an operation, Platformatic DB restricts the columns on which the user can execute to that list. +For save operations, the configuration must specify all the not-nullable fields (otherwise, it would fail at runtime). +Platformatic does these checks at startup.

Example:

    "rule": {
"entity": "page",
"role": "user",
"find": {
"checks": {
"userId": "X-PLATFORMATIC-USER-ID"
},
"fields": ["id", "title"]
}
...
}

In this case, only id and title are returned for a user with a user role on the page entity.

Set entity fields from user metadata

Defaults are used in database inserts; these fields are automatically populated from user metadata, e.g.:

        "defaults": {
"userId": "X-PLATFORMATIC-USER-ID"
},

When an entity is created, the userId column is used and populated using the value from user metadata.

Programmatic rules

If it's necessary to have more control over the authorizations, it's possible to specify the rules programmatically, e.g.:


app.register(auth, {
jwt: {
secret: 'supersecret'
},
rules: [{
role: 'user',
entity: 'page',
async find ({ user, ctx, where }) {
return {
...where,
userId: {
eq: user['X-PLATFORMATIC-USER-ID']
}
}
},
async delete ({ user, ctx, where }) {
return {
...where,
userId: {
eq: user['X-PLATFORMATIC-USER-ID']
}
}
},
defaults: {
userId: async function ({ user, ctx, input }) {
match(user, {
'X-PLATFORMATIC-USER-ID': generated.shift(),
'X-PLATFORMATIC-ROLE': 'user'
})
return user['X-PLATFORMATIC-USER-ID']
}

},
async save ({ user, ctx, where }) {
return {
...where,
userId: {
eq: user['X-PLATFORMATIC-USER-ID']
}
}
}
}]
})

In this example, the user role can delete all the posts edited before yesterday:

 app.register(auth, {
jwt: {
secret: 'supersecret'
},
roleKey: 'X-PLATFORMATIC-ROLE',
anonymousRole: 'anonymous',
rules: [{
role: 'user',
entity: 'page',
find: true,
save: true,
async delete ({ user, ctx, where }) {
return {
...where,
editedAt: {
lt: yesterday
}
}
},
defaults: {
userId: 'X-PLATFORMATIC-USER-ID'
}
}]
})

Access validation on entity mapper for plugins

To assert that a specific user with its role(s) has the correct access rights to use entities in a Platformatic plugin, the context should be passed to the entity mapper in order to verify its permissions, like this:

//plugin.js

app.post('/', async (req, reply) => {
const ctx = req.createPlatformaticCtx()

await app.platformatic.entities.movie.find({
where: { /*...*/ },
ctx
})
})

Skip authorization rules

In custom plugins, it's possible to skip the authorization rules on entities programmatically by setting the skipAuth flag to true or not passing a ctx, e.g.:

// this works even if the user's role doesn't have the `find` permission.
const result = await app.platformatic.entities.page.find({skipAuth: true, ...})

This has the same effect:

// this works even if the user's role doesn't have the `find` permission
const result = await app.platformatic.entities.page.find() // no `ctx`

This is useful for custom plugins for which the authentication is not necessary, so there is no user role set when invoked.

info

Skip authorization rules is not possible on the automatically generated REST and GraphQL APIs.

Avoid repetition of the same rule multiple times

Very often we end up writing the same rules over and over again. +Instead, it's possible to condense the rule for multiple entities on a single entry:

 app.register(auth, {
jwt: {
secret: 'supersecret'
},
roleKey: 'X-PLATFORMATIC-ROLE',
anonymousRole: 'anonymous',
rules: [{
role: 'anonymous',
entities: ['category', 'page'],
find: true,
delete: false,
save: false
}]
})
+ + + + \ No newline at end of file diff --git a/docs/next/reference/db/authorization/strategies/index.html b/docs/next/reference/db/authorization/strategies/index.html new file mode 100644 index 00000000000..70fac5fc012 --- /dev/null +++ b/docs/next/reference/db/authorization/strategies/index.html @@ -0,0 +1,40 @@ + + + + + +Strategies | Platformatic Open Source Software + + + + + +
+
Version: Next

Strategies

Introduction

Platformatic DB supports the following authorization strategies:

JSON Web Token (JWT)

The JSON Web Token (JWT) authorization strategy is built on top +of the @fastify/jwt Fastify plugin.

Platformatic DB JWT integration

To configure it, the quickest way is to pass a shared secret in your +Platformatic DB configuration file, for example:

platformatic.db.json
{
"authorization": {
"jwt": {
"secret": "<shared-secret>"
}
}
}

By default @fastify/jwt looks for a JWT in an HTTP request's Authorization +header. This requires HTTP requests to the Platformatic DB API to include an +Authorization header like this:

Authorization: Bearer <token>

See the @fastify/jwt documentation +for all of the available configuration options.

JSON Web Key Sets (JWKS)

The JWT authorization strategy includes support for JSON Web Key Sets.

To configure it:

platformatic.db.json
{
"authorization": {
"jwt": {
"jwks": {
"allowedDomains": [
"https://ISSUER_DOMAIN"
]
}
}
}
}

When a JSON Web Token is included in a request to Platformatic DB, it retrieves the correct public key from https://ISSUER_DOMAIN/.well-known/jwks.json and uses it to verify the JWT signature. The token carries all the information, like the kid, which is the key id used to sign the token itself, so no other configuration is required.

JWKS can be enabled without any options:

platformatic.db.json
{
"authorization": {
"jwt": {
"jwks": true
}
}
}

When configured like this, the JWK URL is calculated from the iss (issuer) field of JWT, so +every JWT token from an issuer that exposes a valid JWKS token will pass the validation. +This configuration should only be used in development, while +in every other case the allowedDomains option should be specified.

Any option supported by the get-jwks +library can be specified in the authorization.jwt.jwks object.

JWT Custom Claim Namespace

JWT claims can be namespaced to avoid name collisions. If so, we will receive tokens with custom claims such as: https://platformatic.dev/X-PLATFORMATIC-ROLE (where https://platformatic.dev/ is the namespace). If we want to map these claims to user metadata removing our namespace, we can specify the namespace in the JWT options:

platformatic.db.json
{
"authorization": {
"jwt": {
"namespace": "https://platformatic.dev/"
}
}
}

With this configuration, the https://platformatic.dev/X-PLATFORMATIC-ROLE claim +is mapped to X-PLATFORMATIC-ROLE user metadata.

Webhook

Platformatic DB can use a webhook to authenticate requests.

Platformatic DB Webhook integration

In this case, the URL is configured on authorization:

platformatic.db.json
{
"authorization": {
"webhook": {
"url": "<webhook url>"
}
}
}

When a request is received, Platformatic sends a POST to the webhook, replicating +the same body and headers, except for:

  • host
  • connection

In the Webhook case, the HTTP response contains the roles/user information as HTTP headers.

HTTP headers (development only)

danger

Passing an admin API key via HTTP headers is highly insecure and should only be used +during development or within protected networks.

If a request has X-PLATFORMATIC-ADMIN-SECRET HTTP header set with a valid adminSecret +(see configuration reference) the +role is set automatically as platformatic-admin, unless a different role is set for +user impersonation (which is disabled if JWT or Webhook are set, see below).

Platformatic DB HTTP Headers

Also, the following rule is automatically added to every entity, allowing the user +that presented the adminSecret to perform any operation on any entity:

{
"role": "platformatic-admin",
"find": false,
"delete": false,
"save": false
}
+ + + + \ No newline at end of file diff --git a/docs/next/reference/db/authorization/user-roles-metadata/index.html b/docs/next/reference/db/authorization/user-roles-metadata/index.html new file mode 100644 index 00000000000..9da6acc85be --- /dev/null +++ b/docs/next/reference/db/authorization/user-roles-metadata/index.html @@ -0,0 +1,31 @@ + + + + + +User Roles & Metadata | Platformatic Open Source Software + + + + + +
+
Version: Next

User Roles & Metadata

Introduction

Roles and user information are passed to Platformatic DB from an external +authentication service as a string (JWT claims or HTTP headers). We refer to +this data as user metadata.

Roles

Users can have a list of roles associated with them. These roles can be specified +in an X-PLATFORMATIC-ROLE property as a list of comma separated role names +(the key name is configurable).

Note that role names are just strings.

Reserved roles

Some special role names are reserved by Platformatic DB:

  • platformatic-admin : this identifies a user who has admin powers
  • anonymous: set automatically when no roles are associated

Anonymous role

If a user has no role, the anonymous role is assigned automatically. It's possible +to specify rules to apply to users with this role:

    {
"role": "anonymous",
"entity": "page",
"find": false,
"delete": false,
"save": false
}

In this case, a user that has no role or explicitly has the anonymous role +cannot perform any operations on the page entity.

Role impersonation

If a request includes a valid X-PLATFORMATIC-ADMIN-SECRET HTTP header it is possible to impersonate a user's roles. The roles to impersonate can be specified by sending a X-PLATFORMATIC-ROLE HTTP header containing a comma separated list of roles.

note

When JWT or Webhook are set, user role impersonation is not enabled, and the role is always set as platformatic-admin automatically if the X-PLATFORMATIC-ADMIN-SECRET HTTP header is specified.

Role configuration

The roles key in user metadata defaults to X-PLATFORMATIC-ROLE. It's possible to change it using the roleKey field in configuration. Same for the anonymous role, which value can be changed using anonymousRole.

 "authorization": {
"roleKey": "X-MYCUSTOM-ROLE_KEY",
"anonymousRole": "anonym",
"rules": [
...
]
}

User metadata

User roles and other user data, such as userId, are referred to by Platformatic +DB as user metadata.

User metadata is parsed from an HTTP request and stored in a user object on the Fastify request object. This object is populated on-demand, but it's possible to populate it explicitly with await request.setupDBAuthorizationUser().

+ + + + \ No newline at end of file diff --git a/docs/next/reference/db/configuration/index.html b/docs/next/reference/db/configuration/index.html new file mode 100644 index 00000000000..bf427254b7a --- /dev/null +++ b/docs/next/reference/db/configuration/index.html @@ -0,0 +1,59 @@ + + + + + +Configuration | Platformatic Open Source Software + + + + + +
+
Version: Next

Configuration

Platformatic DB is configured with a configuration file. It supports the use +of environment variables as setting values with configuration placeholders.

Configuration file

If the Platformatic CLI finds a file in the current working directory matching +one of these filenames, it will automatically load it:

  • platformatic.db.json
  • platformatic.db.json5
  • platformatic.db.yml or platformatic.db.yaml
  • platformatic.db.tml or platformatic.db.toml

Alternatively, a --config option with a configuration +filepath can be passed to most platformatic db CLI commands.

The configuration examples in this reference use JSON.

Supported formats

FormatExtensions
JSON.json
JSON5.json5
YAML.yml, .yaml
TOML.tml, .toml

Comments are supported by the JSON5, YAML and TOML file formats.

Settings

Configuration settings are organised into the following groups:

Sensitive configuration settings, such as a database connection URL that contains +a password, should be set using configuration placeholders.

db

A required object with the following settings:

  • connectionString (required, string) — Database connection URL.

    • Example: postgres://user:password@my-database:5432/db-name
  • schema (array of string) - Currently supported only for postgres, schemas used to look for entities. If not provided, the default public schema is used.

    Examples

  "db": {
"connectionString": "(...)",
"schema": [
"schema1", "schema2"
],
...

},

  • Platformatic DB supports MySQL, MariaDB, PostgreSQL and SQLite.

  • graphql (boolean or object, default: true) — Controls the GraphQL API interface, with optional GraphiQL UI.

    Examples

    Enables GraphQL support

    {
    "db": {
    ...
    "graphql": true
    }
    }

    Enables GraphQL support with GraphiQL

    {
    "db": {
    ...
    "graphql": {
    "graphiql": true
    }
    }
    }

    It's possible to selectively ignore entities:

    {
    "db": {
    ...
    "graphql": {
    "ignore": {
    "categories": true
    }
    }
    }
    }

    It's possible to selectively ignore fields:

    {
    "db": {
    ...
    "graphql": {
    "ignore": {
    "categories": {
    "name": true
    }
    }
    }
    }
    }

    It's possible to add a custom GraphQL schema during the startup:

    {
    "db": {
    ...
    "graphql": {
    "schemaPath": "path/to/schema.graphql"
    }
    }
    }
  • openapi (boolean or object, default: true) — Enables OpenAPI REST support.

    • If value is an object, all OpenAPI v3 allowed properties can be passed. Also a prefix property can be passed to set the OpenAPI prefix.
    • Platformatic DB uses @fastify/swagger under the hood to manage this configuration.

    Examples

    Enables OpenAPI

    {
    "db": {
    ...
    "openapi": true
    }
    }

    Enables OpenAPI with prefix

    {
    "db": {
    ...
    "openapi": {
    "prefix": "/api"
    }
    }
    }

    Enables OpenAPI with options

    {
    "db": {
    ...
    "openapi": {
    "info": {
    "title": "Platformatic DB",
    "description": "Exposing a SQL database as REST"
    }
    }
    }
    }

    You can for example add the security section, so that Swagger will allow you to add the authentication header to your requests. +In the following code snippet, we're adding a Bearer token in the form of a JWT:

    {
    "db": {
    ...
    "openapi": {
    ...
    "security": [{ "bearerAuth": [] }],
    "components": {
    "securitySchemes": {
    "bearerAuth": {
    "type": "http",
    "scheme": "bearer",
    "bearerFormat": "JWT"
    }
    }
    }
    }
    }
    }

    It's possible to selectively ignore entities:

    {
    "db": {
    ...
    "openapi": {
    "ignore": {
    "categories": true
    }
    }
    }
    }

    It's possible to selectively ignore fields:

    {
    "db": {
    ...
    "openapi": {
    "ignore": {
    "categories": {
    "name": true
    }
    }
    }
    }
    }
  • ignore (object) — Key/value object that defines which database tables should not be mapped as API entities.

    Examples

    {
    "db": {
    ...
    "ignore": {
    "versions": true // "versions" table will be not mapped with GraphQL/REST APIs
    }
    }
    }
  • events (boolean or object, default: true) — Controls the support for events published by the SQL mapping layer. If enabled, this option adds support for GraphQL Subscription over WebSocket. By default it uses an in-process message broker. It's possible to configure it to use Redis instead.

    Examples

    {
    "db": {
    ...
    "events": {
    "connectionString": "redis://:password@redishost.com:6380/"
    }
    }
    }
  • schemalock (boolean or object, default: false) — Controls the caching of the database schema on disk. +If set to true the database schema metadata is stored inside a schema.lock file. +It's also possible to configure the location of that file by specifying a path, like so:

    Examples

    {
    "db": {
    ...
    "schemalock": {
    "path": "./dbmetadata"
    }
    }
    }

    Starting Platformatic DB or running a migration will automatically create the schemalock file.

metrics

Configuration for a Prometheus server that will export monitoring metrics +for the current server instance. It uses fastify-metrics +under the hood.

This setting can be a boolean or an object. If set to true the Prometheus server will listen on http://0.0.0.0:9090.

Supported object properties:

  • hostname (string) — The hostname where Prometheus server will listen for connections.
  • port (number) — The port where Prometheus server will listen for connections.
  • auth (object) — Basic Auth configuration. username and password are required here +(use environment variables).

migrations

Configures Postgrator to run migrations against the database.

An optional object with the following settings:

  • dir (required, string): Relative path to the migrations directory.
  • autoApply (boolean, default: false): Automatically apply migrations when Platformatic DB server starts.

plugins

An optional object that defines the plugins loaded by Platformatic DB.

  • paths (required, array): an array of paths (string) +or an array of objects composed as follows,
    • path (string): Relative path to plugin's entry point.
    • options (object): Optional plugin options.
    • encapsulate (boolean): if the path is a folder, it instructs Platformatic to not encapsulate those plugins, allowing decorators and hooks to be shared across all routes.
    • maxDepth (integer): if the path is a folder, it limits the depth to load the content from.
  • typescript (boolean or object): enable TypeScript compilation. A tsconfig.json file is required in the same folder.
{
"plugins": {
"paths": [{
"path": "./my-plugin.js",
"options": {
"foo": "bar"
}
}]
}
}

typescript compilation options

The typescript option can also be an object to customize the compilation. Here are the supported options:

  • enabled (boolean): enables compilation
  • tsConfig (string): path to the tsconfig.json file relative to the configuration
  • outDir (string): the output directory of tsconfig.json, in case tsconfig.json is not available and enabled is set to false (production build)
  • flags (array of string): flags to be passed to tsc. Overrides tsConfig. +

Example:

{
"plugins": {
"paths": [{
"path": "./my-plugin.js",
"options": {
"foo": "bar"
}
}],
"typescript": {
"enabled": false,
"tsConfig": "./path/to/tsconfig.json",
"outDir": "dist"
}
}
}

watch

Disable watching for file changes if set to false. It can also be customized with the following options:

  • ignore (string[], default: null): List of glob patterns to ignore when watching for changes. If null or not specified, ignore rule is not applied. Ignore option doesn't work for typescript files.

  • allow (string[], default: ['*.js', '**/*.js']): List of glob patterns to allow when watching for changes. If null or not specified, allow rule is not applied. Allow option doesn't work for typescript files.

    Example

    {
    "watch": {
    "ignore": ["*.mjs", "**/*.mjs"],
    "allow": ["my-plugin.js", "plugins/*.js"]
    }
    }

server

A required object with the following settings:

  • hostname (required, string) — Hostname where Platformatic DB server will listen for connections.

  • port (required, number) — Port where Platformatic DB server will listen for connections.

  • healthCheck (boolean or object) — Enables the health check endpoint.

    • Powered by @fastify/under-pressure.
    • The value can be an object, used to specify the interval between checks in milliseconds (default: 5000)

    Example

    {
    "server": {
    ...
    "healthCheck": {
    "interval": 2000
    }
    }
    }
  • cors (object) — Configuration for Cross-Origin Resource Sharing (CORS) headers.

    • All options will be passed to the @fastify/cors plugin. In order to specify a RegExp object, you can pass { regexp: 'yourregexp' }, +it will be automatically converted
  • https (object) - Configuration for HTTPS supporting the following options.

    • key (required, string, object, or array) - If key is a string, it specifies the private key to be used. If key is an object, it must have a path property specifying the private key file. Multiple keys are supported by passing an array of keys.
    • cert (required, string, object, or array) - If cert is a string, it specifies the certificate to be used. If cert is an object, it must have a path property specifying the certificate file. Multiple certificates are supported by passing an array of keys.
  • logger (object) -- the logger configuration.

  • pluginTimeout (integer) -- the number of milliseconds to wait for a Fastify plugin to load

  • bodyLimit (integer) -- the maximum request body size in bytes

  • maxParamLength (integer) -- the maximum length of a request parameter

  • caseSensitive (boolean) -- if true, the router will be case sensitive

  • ignoreTrailingSlash (boolean) -- if true, the router will ignore the trailing slash

  • ignoreDuplicateSlashes (boolean) -- if true, the router will ignore duplicate slashes in a path

  • connectionTimeout (integer) -- the milliseconds to wait for a new HTTP request

  • keepAliveTimeout (integer) -- the milliseconds to wait for a keep-alive HTTP request

  • maxRequestsPerSocket (integer) -- the maximum number of requests per socket

  • forceCloseConnections (boolean or "idle") -- if true, the server will close all connections when it is closed

  • requestTimeout (integer) -- the milliseconds to wait for a request to be completed

  • disableRequestLogging (boolean) -- if true, the request logger will be disabled

  • exposeHeadRoutes (boolean) -- if true, the router will expose HEAD routes

  • serializerOpts (object) -- the serializer options

  • requestIdHeader (string or false) -- the name of the header that will contain the request id

  • requestIdLogLabel (string) -- Defines the label used for the request identifier when logging the request. default: 'reqId'

  • jsonShorthand (boolean) -- default: true -- visit fastify docs for more details

  • trustProxy (boolean or integer or string or String[]) -- default: false -- visit fastify docs for more details

tip

See the fastify docs for more details.

authorization

An optional object with the following settings:

  • adminSecret (string): A secret that should be sent in an +x-platformatic-admin-secret HTTP header when performing GraphQL/REST API +calls. Use an environment variable placeholder +to securely provide the value for this setting.
  • roleKey (string, default: X-PLATFORMATIC-ROLE): The name of the key in user +metadata that is used to store the user's roles. See Role configuration.
  • anonymousRole (string, default: anonymous): The name of the anonymous role. See Role configuration.
  • jwt (object): Configuration for the JWT authorization strategy. +Any option accepted by @fastify/jwt +can be passed in this object.
  • webhook (object): Configuration for the Webhook authorization strategy.
    • url (required, string): Webhook URL that Platformatic DB will make a POST request to.
  • rules (array): Authorization rules that describe the CRUD actions that users are allowed to perform against entities. See Rules documentation.
note

If an authorization object is present, but no rules are specified, no CRUD operations are allowed unless adminSecret is passed.

Example

platformatic.db.json
{
"authorization": {
"jwt": {
"secret": "{PLT_AUTHORIZATION_JWT_SECRET}"
},
"rules": [
...
]
}
}

telemetry

Open Telemetry is optionally supported with these settings:

  • serviceName (required, string) — Name of the service as will be reported in open telemetry.
  • version (string) — Optional version (free form)
  • skip (array). Optional list of operations to skip when exporting telemetry defined object with properties:
    • method: GET, POST, PUT, DELETE, PATCH, HEAD, OPTIONS, TRACE
    • path. e.g.: /documentation/json
  • exporter (object or array) — Exporter configuration. If not defined, the exporter defaults to console. If an array of objects is configured, every object must be a valid exporter object. The exporter object has the following properties:
    • type (string) — Exporter type. Supported values are console, otlp, zipkin and memory (default: console). memory is only supported for testing purposes.
    • options (object) — These options are supported:
      • url (string) — The URL to send the telemetry to. Required for otlp exporter. This has no effect on console and memory exporters.
      • headers (object) — Optional headers to send with the telemetry. This has no effect on console and memory exporters.

Note that OTLP traces can be consumed by different solutions, like Jaeger. Here the full list.

Example

{
"telemetry": {
"serviceName": "test-service",
"exporter": {
"type": "otlp",
"options": {
"url": "http://localhost:4318/v1/traces"
}
}
}
}

Environment variable placeholders

The value for any configuration setting can be replaced with an environment variable by adding a placeholder in the configuration file, for example {PLT_SERVER_LOGGER_LEVEL}.

All placeholders in a configuration must be available as an environment variable and must meet the allowed placeholder name rules.

Example

platformatic.db.json
{
"db": {
"connectionString": "{DATABASE_URL}"
},
"server": {
"logger": {
"level": "{PLT_SERVER_LOGGER_LEVEL}"
},
"port": "{PORT}"
}
}

Platformatic will replace the placeholders in this example with the environment variables of the same name.

Setting environment variables

If a .env file exists it will automatically be loaded by Platformatic using dotenv. For example:

.env
PLT_SERVER_LOGGER_LEVEL=info
PORT=8080

The .env file must be located in the same folder as the Platformatic configuration file or in the current working directory.

Environment variables can also be set directly on the command line, for example:

PLT_SERVER_LOGGER_LEVEL=debug npx platformatic db

Allowed placeholder names

Only placeholder names prefixed with PLT_, or that are in this allow list, will be dynamically replaced in the configuration file:

  • PORT
  • DATABASE_URL

This restriction is to avoid accidentally exposing system environment variables. An error will be raised by Platformatic if it finds a configuration placeholder that isn't allowed.

The default allow list can be extended by passing a --allow-env CLI option with a comma-separated list of strings, for example:

npx platformatic db start --allow-env=HOST,SERVER_LOGGER_LEVEL
# OR
npx platformatic start --allow-env=HOST,SERVER_LOGGER_LEVEL

If --allow-env is passed as an option to the CLI, it will be merged with the default allow list.

Sample Configuration

This is a bare minimum configuration for Platformatic DB. Uses a local ./db.sqlite SQLite database, with OpenAPI and GraphQL support.

Server will listen to http://127.0.0.1:3042

{
"server": {
"hostname": "127.0.0.1",
"port": "3042"
},
"db": {
"connectionString": "sqlite://./db.sqlite",
"graphiql": true,
"openapi": true,
"graphql": true
}
}
+ + + + \ No newline at end of file diff --git a/docs/next/reference/db/introduction/index.html b/docs/next/reference/db/introduction/index.html new file mode 100644 index 00000000000..2f06d95ee71 --- /dev/null +++ b/docs/next/reference/db/introduction/index.html @@ -0,0 +1,25 @@ + + + + + +Platformatic DB | Platformatic Open Source Software + + + + + +
+
Version: Next

Platformatic DB

Platformatic DB is an HTTP server that provides a flexible set of tools for building robust APIs with Node.js.

For a high-level overview of how Platformatic DB works, please reference the Architecture guide.

info

Platformatic DB is currently in public beta.

Features

info

Get up and running in 2 minutes using our Quick Start Guide

Supported databases

DatabaseVersion
SQLite3.
PostgreSQL>= 15
MySQL>= 5.7
MariaDB>= 10.11

The required database driver is automatically inferred and loaded based on the value of the connectionString configuration setting.

Public beta

Platformatic DB is in public beta. You can use it in production, but it's quite likely that you'll encounter significant bugs.

If you run into a bug or have a suggestion for improvement, please raise an issue on GitHub.

+ + + + \ No newline at end of file diff --git a/docs/next/reference/db/logging/index.html b/docs/next/reference/db/logging/index.html new file mode 100644 index 00000000000..19a0493db62 --- /dev/null +++ b/docs/next/reference/db/logging/index.html @@ -0,0 +1,25 @@ + + + + + +Logging | Platformatic Open Source Software + + + + + +
+
Version: Next

Logging

Platformatic DB uses a low-overhead logger named Pino to output structured log messages.

Logger output level

By default the logger output level is set to info, meaning that all log messages with a level of info or above will be output by the logger. See the Pino documentation for details on the supported log levels.

The logger output level can be overridden by adding a logger object to the server configuration settings group:

platformatic.db.json
{
"server": {
"logger": {
"level": "error"
},
...
},
...
}

Log formatting

If you run Platformatic DB in a terminal, where standard out (stdout) is a TTY:

  • pino-pretty is automatically used to pretty print the logs and make them easier to read during development.
  • The Platformatic logo is printed (if colors are supported in the terminal emulator)

Example:

$ npx platformatic db start




/////////////
///// /////
/// ///
/// ///
/// ///
&& /// /// &&
&&&&&& /// /// &&&&&&
&&&& /// /// &&&&
&&& /// /// &&&&&&&&&&&&
&&& /// /////// //// && &&&&&
&& /// /////////////// &&&
&&& /// /// &&&
&&& /// // &&
&&& /// &&
&&& /// &&&
&&&& /// &&&
&&&&& /// &&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&
///
///
///
///
///
///


[11:20:33.466] INFO (337606): server listening
url: "http://127.0.0.1:3042"

If stdout is redirected to a non-TTY, the logo is not printed and the logs are formatted as newline-delimited JSON:

$ npx platformatic db start | head
{"level":30,"time":1665566628973,"pid":338365,"hostname":"darkav2","url":"http://127.0.0.1:3042","msg":"server listening"}
+ + + + \ No newline at end of file diff --git a/docs/next/reference/db/migrations/index.html b/docs/next/reference/db/migrations/index.html new file mode 100644 index 00000000000..7db5806fbd3 --- /dev/null +++ b/docs/next/reference/db/migrations/index.html @@ -0,0 +1,17 @@ + + + + + +Migrations | Platformatic Open Source Software + + + + + +
+
Version: Next

Migrations

It uses Postgrator under the hood to run migrations. Please refer to the Postgrator documentation for guidance on writing migration files.

In brief, you should create a file structure like this

migrations/
|- 001.do.sql
|- 001.undo.sql
|- 002.do.sql
|- 002.undo.sql
|- 003.do.sql
|- 003.undo.sql
|- 004.do.sql
|- 004.undo.sql
|- ... and so on

Postgrator uses a table in your schema, to store which migrations have been already processed, so that only new ones will be applied at every server start.

You can always roll back some migrations by specifying what version you would like to roll back to.

Example

$ platformatic db migrations apply --to 002

Will execute 004.undo.sql, 003.undo.sql in this order. If you keep those files in migrations directory, when the server restarts it will execute 003.do.sql and 004.do.sql in this order if the autoApply value is true, or you can run the db migrations apply command.

It's also possible to rollback a single migration with -r:

$ platformatic db migrations apply -r 

How to run migrations

There are two ways to run migrations in Platformatic DB. They can be processed automatically when the server starts if the autoApply value is true, or you can just run the db migrations apply command.

In both cases you have to edit your config file to tell Platformatic DB where are your migration files.

Automatically on server start

To run migrations when Platformatic DB starts, you need to use the config file root property migrations.

There are two options in the "migrations" property

  • dir (required) the directory where the migration files are located. It will be relative to the config file path.
  • autoApply a boolean value that tells Platformatic DB to auto-apply migrations or not (default: false)

Example

{
...
"migrations": {
"dir": "./path/to/migrations/folder",
"autoApply": false
}
}

Manually with the CLI

See documentation about db migrations apply command

In short:

  • be sure to define a correct migrations.dir folder under the config on platformatic.db.json
  • get the MIGRATION_NUMBER (f.e. if the file is named 002.do.sql will be 002)
  • run npx platformatic db migrations apply --to MIGRATION_NUMBER
+ + + + \ No newline at end of file diff --git a/docs/next/reference/db/plugin/index.html b/docs/next/reference/db/plugin/index.html new file mode 100644 index 00000000000..14ec23cde45 --- /dev/null +++ b/docs/next/reference/db/plugin/index.html @@ -0,0 +1,19 @@ + + + + + +Plugin | Platformatic Open Source Software + + + + + +
+
Version: Next

Plugin

If you want to extend Platformatic DB features, it is possible to register a plugin, which will be in the form of a standard Fastify plugin.

The config file will specify where the plugin file is located as the example below:

{
...
"plugins": {
"paths": ["./plugin/index.js"]
}
}

The paths are relative to the config file path.

Once the config file is set up, you can write your plugin to extend Platformatic DB API or write your custom business logic.

You should export an async function which receives the following parameters

  • app (FastifyInstance) that is the main fastify instance running Platformatic DB
  • opts all the options specified in the config file after path
  • You can always access Platformatic data mapper through app.platformatic property.
info

To make sure that a user has the appropriate set of permissions to perform any action on an entity the context should be passed to the entity mapper operation like this:

app.post('/', async (req, reply) => {
const ctx = req.createPlatformaticCtx()

await app.platformatic.entities.movies.find({
where: { /*...*/ },
ctx
})
})

Check some examples.

Hot Reload

Plugin file is being watched by fs.watch function.

You don't need to reload Platformatic DB server while working on your plugin. Every time you save, the watcher will trigger a reload event and the server will auto-restart and load your updated code.

tip

At this time, on Linux, file watch in subdirectories is not supported due to a Node.js limitation (documented here).

Directories

The path can also be a directory. In that case, the directory will be loaded with @fastify/autoload.

Consider the following directory structure:

├── routes
│ ├── foo
│ │ ├── something.js
│ │ └── bar
│ │ └── baz.js
│ ├── single-plugin
│ │ └── utils.js
│ └── another-plugin.js
└── platformatic.service.json

By default the folder will be added as a prefix to all the routes defined within it. See the autoload documentation for all the options to customize this behavior.

Multiple plugins

Multiple plugins can be loaded in parallel by specifying an array:

{
...
"plugins": {
"paths": [{
"path": "./plugin/index.js"
}, {
"path": "./routes/"
}]
}
}

fastify.swagger()

TypeScript and autocompletion

If you want to access any of the types provided by Platformatic DB, generate them using the platformatic db types command. This will create a global.d.ts file that you can now import everywhere, like so:

/// <reference types="./global.d.ts" />

Remember to adjust the path to global.d.ts.

Plugin definition with TypeScript

Here is an example of writing a plugin in TypeScript:

/// <reference types="./global.d.ts" />
import { FastifyInstance, FastifyPluginOptions } from 'fastify'

export default async function (fastify: FastifyInstance, opts: FastifyPluginOptions) {
}

Note that you need to add the "plugins": { "typescript": true } configuration to your platformatic.service.json.

+ + + + \ No newline at end of file diff --git a/docs/next/reference/db/programmatic/index.html b/docs/next/reference/db/programmatic/index.html new file mode 100644 index 00000000000..38dd0322917 --- /dev/null +++ b/docs/next/reference/db/programmatic/index.html @@ -0,0 +1,17 @@ + + + + + +Programmatic API | Platformatic Open Source Software + + + + + +
+
Version: Next

Programmatic API

It's possible to start an instance of Platformatic DB from JavaScript.

import { buildServer } from '@platformatic/db'

const app = await buildServer('/path/to/platformatic.db.json')

await app.start() // this will start our server

console.log('URL', app.url)

const res = await fetch(app.url)
console.log(await res.json())

// do something

await app.close()

It is also possible to customize the configuration:

import { buildServer } from '@platformatic/db'

const app = await buildServer({
server: {
hostname: '127.0.0.1',
port: 0
},
db: {
connectionString: 'sqlite://test.sqlite'
},
})

await app.start() // this will start our server

console.log('URL', app.url)

const res = await fetch(app.url)
console.log(await res.json())

// do something

await app.close()

For more details on how this is implemented, read Platformatic Service Programmatic API.

API

buildServer(config)

Returns an instance of the restartable application

RestartableApp

.start()

Listen to the hostname/port combination specified in the config.

.restart()

Restart the Fastify application

.close()

Stops the application.

+ + + + \ No newline at end of file diff --git a/docs/next/reference/db/schema-support/index.html b/docs/next/reference/db/schema-support/index.html new file mode 100644 index 00000000000..ea9690fad79 --- /dev/null +++ b/docs/next/reference/db/schema-support/index.html @@ -0,0 +1,21 @@ + + + + + +Schema support | Platformatic Open Source Software + + + + + +
+
Version: Next

Schema support

It's possible to specify the schemas where the tables are located (if the database supports schemas). Platformatic DB will inspect these schemas to create the entities

Example

CREATE SCHEMA IF NOT EXISTS "test1";
CREATE TABLE IF NOT EXISTS test1.movies (
id INTEGER PRIMARY KEY,
title TEXT NOT NULL
);

CREATE SCHEMA IF NOT EXISTS "test2";
CREATE TABLE IF NOT EXISTS test2.users (
id INTEGER PRIMARY KEY,
title TEXT NOT NULL
);

The schemas must be specified in configuration in the schema section. Note that if we use schemas and migrations, we must specify the schema in the migrations table as well (with PostgreSQL, we assume we use the default public schema).

  ...
"db": {
"connectionString": "(...)",
"schema": [
"test1", "test2"
],
"ignore": {
"versions": true
}
},
"migrations": {
"dir": "migrations",
"table": "test1.versions"
},

...

The entity names are then generated in the form schemaName + entityName, PascalCase (this is necessary to avoid name collisions in case there are tables with same name in different schemas). So for instance for the example above we generate the Test1Movie and Test2User entities.

info

Please pay attention to the entity names when using schema, these are also used to setup authorization rules

+ + + + \ No newline at end of file diff --git a/docs/next/reference/runtime/configuration/index.html b/docs/next/reference/runtime/configuration/index.html new file mode 100644 index 00000000000..bd0d3531aaa --- /dev/null +++ b/docs/next/reference/runtime/configuration/index.html @@ -0,0 +1,67 @@ + + + + + +Configuration | Platformatic Open Source Software + + + + + +
+
Version: Next

Configuration

Platformatic Runtime is configured with a configuration file. It supports the use of environment variables as setting values with configuration placeholders.

Configuration file

If the Platformatic CLI finds a file in the current working directory matching +one of these filenames, it will automatically load it:

  • platformatic.runtime.json
  • platformatic.runtime.json5
  • platformatic.runtime.yml or platformatic.runtime.yaml
  • platformatic.runtime.tml or platformatic.runtime.toml

Alternatively, a --config option with a configuration +filepath can be passed to most platformatic runtime CLI commands.

The configuration examples in this reference use JSON.

Supported formats

FormatExtensions
JSON.json
JSON5.json5
YAML.yml, .yaml
TOML.tml

Comments are supported by the JSON5, YAML and TOML file formats.

Settings

Configuration settings are organized into the following groups:

Configuration settings containing sensitive data should be set using +configuration placeholders.

The autoload and services settings can be used together, but at least one +of them must be provided. When the configuration file is parsed, autoload +configuration is translated into services configuration.

autoload

The autoload configuration is intended to be used with monorepo applications. +autoload is an object with the following settings:

  • path (required, string) - The path to a directory containing the +microservices to load. In a traditional monorepo application, this directory is +typically named packages.
  • exclude (array of strings) - Child directories inside of path that +should not be processed.
  • mappings (object) - Each microservice is given an ID and is expected +to have a Platformatic configuration file. By default the ID is the +microservice's directory name, and the configuration file is expected to be a +well-known Platformatic configuration file. mappings can be used to override +these default values.
    • id (required, string) - The overridden ID. This becomes the new +microservice ID.
    • config (required, string) - The overridden configuration file name. This is the file that will be used when starting the microservice.

services

services is an array of objects that defines the microservices managed by the +runtime. Each service object supports the following settings:

  • id (required, string) - A unique identifier for the microservice. +When working with the Platformatic Composer, this value corresponds to the id +property of each object in the services section of the config file. When +working with client objects, this corresponds to the optional serviceId +property or the name field in the client's package.json file if a +serviceId is not explicitly provided.
  • path (required, string) - The path to the directory containing +the microservice.
  • config (required, string) - The configuration file used to start +the microservice.

entrypoint

The Platformatic Runtime's entrypoint is a microservice that is exposed +publicly. This value must be the ID of a service defined via the autoload or +services configuration.

hotReload

An optional boolean, defaulting to false, indicating if hot reloading should +be enabled for the runtime. If this value is set to false, it will disable +hot reloading for any microservices managed by the runtime. If this value is +true, hot reloading for individual microservices is managed by the +configuration of that microservice.

danger

While hot reloading is useful for development, it is not recommended for use in +production.

allowCycles

An optional boolean, defaulting to false, indicating if dependency cycles +are allowed between microservices managed by the runtime. When the Platformatic +Runtime parses the provided configuration, it examines the clients of each +microservice, as well as the services of Platformatic Composer applications to +build a dependency graph. A topological sort is performed on this dependency +graph so that each service is started after all of its dependencies have been +started. If there are cycles, the topological sort fails and the Runtime does +not start any applications.

If allowCycles is true, the topological sort is skipped, and the +microservices are started in the order specified in the configuration file.

telemetry

Open Telemetry is optionally supported with these settings:

  • serviceName (required, string) — Name of the service as will be reported in open telemetry. In the runtime case, the name of the services as reported in traces is ${serviceName}-${serviceId}, where serviceId is the id of the service in the runtime.
  • version (string) — Optional version (free form)
  • skip (array). Optional list of operations to skip when exporting telemetry defined object with properties:
    • method: GET, POST, PUT, DELETE, PATCH, HEAD, OPTIONS, TRACE
    • path. e.g.: /documentation/json
  • exporter (object or array) — Exporter configuration. If not defined, the exporter defaults to console. If an array of objects is configured, every object must be a valid exporter object. The exporter object has the following properties:
    • type (string) — Exporter type. Supported values are console, otlp, zipkin and memory (default: console). memory is only supported for testing purposes.
    • options (object) — These options are supported:
      • url (string) — The URL to send the telemetry to. Required for otlp exporter. This has no effect on console and memory exporters.
      • headers (object) — Optional headers to send with the telemetry. This has no effect on console and memory exporters.

Note that OTLP traces can be consumed by different solutions, like Jaeger. Here the full list.

Example

{
"telemetry": {
"serviceName": "test-service",
"exporter": {
"type": "otlp",
"options": {
"url": "http://localhost:4318/v1/traces"
}
}
}
}

Environment variable placeholders

The value for any configuration setting can be replaced with an environment +variable by adding a placeholder in the configuration file, for example +{PLT_ENTRYPOINT}.

All placeholders in a configuration must be available as an environment +variable and must meet the +allowed placeholder name rules.

Setting environment variables

If a .env file exists it will automatically be loaded by Platformatic using +dotenv. For example:

.env
PLT_ENTRYPOINT=service

The .env file must be located in the same folder as the Platformatic +configuration file or in the current working directory.

Environment variables can also be set directly on the command line, for example:

PLT_ENTRYPOINT=service npx platformatic runtime

Allowed placeholder names

Only placeholder names prefixed with PLT_, or that are in this allow list, +will be dynamically replaced in the configuration file:

  • PORT
  • DATABASE_URL

This restriction is to avoid accidentally exposing system environment variables. +An error will be raised by Platformatic if it finds a configuration placeholder +that isn't allowed.

The default allow list can be extended by passing a --allow-env CLI option +with a comma separated list of strings, for example:

npx platformatic runtime --allow-env=HOST,SERVER_LOGGER_LEVEL

If --allow-env is passed as an option to the CLI, it will be merged with the +default allow list.

+ + + + \ No newline at end of file diff --git a/docs/next/reference/runtime/introduction/index.html b/docs/next/reference/runtime/introduction/index.html new file mode 100644 index 00000000000..353b40e346c --- /dev/null +++ b/docs/next/reference/runtime/introduction/index.html @@ -0,0 +1,33 @@ + + + + + +Platformatic Runtime | Platformatic Open Source Software + + + + + +
+
Version: Next

Platformatic Runtime

Platformatic Runtime is an environment for running multiple Platformatic +microservices as a single monolithic deployment unit.

info

Platformatic Runtime is currently in public beta.

Features

Public beta

Platformatic Runtime is in public beta. You can use it in production, but it's quite +likely that you'll encounter significant bugs.

If you run into a bug or have a suggestion for improvement, please +raise an issue on GitHub.

Standalone usage

If you're only interested in the features available in Platformatic Runtime, you can replace platformatic with @platformatic/runtime in the dependencies of your package.json, so that you'll import fewer deps.

Example configuration file

The following configuration file can be used to start a new Platformatic +Runtime project. For more details on the configuration file, see the +configuration documentation.

{
"$schema": "https://platformatic.dev/schemas/v0.26.0/runtime",
"autoload": {
"path": "./packages",
"exclude": ["docs"]
},
"entrypoint": "entrypointApp"
}

TypeScript Compilation

Platformatic Runtime streamlines the compilation of all services built on TypeScript with the command +plt runtime compile. The TypeScript compiler (tsc) is required to be installed separately.

Interservice communication

The Platformatic Runtime allows multiple microservice applications to run +within a single process. Only the entrypoint binds to an operating system +port and can be reached from outside of the runtime.

Within the runtime, all interservice communication happens by injecting HTTP +requests into the running servers, without binding them to ports. This injection +is handled by +fastify-undici-dispatcher.

Each microservice is assigned an internal domain name based on its unique ID. +For example, a microservice with the ID awesome is given the internal domain +of http://awesome.plt.local. The fastify-undici-dispatcher module maps that +domain to the Fastify server running the awesome microservice. Any Node.js +APIs based on Undici, such as fetch(), will then automatically route requests +addressed to awesome.plt.local to the corresponding Fastify server.

+ + + + \ No newline at end of file diff --git a/docs/next/reference/runtime/programmatic/index.html b/docs/next/reference/runtime/programmatic/index.html new file mode 100644 index 00000000000..15c636fa733 --- /dev/null +++ b/docs/next/reference/runtime/programmatic/index.html @@ -0,0 +1,28 @@ + + + + + +Programmatic API | Platformatic Open Source Software + + + + + +
+
Version: Next

Programmatic API

In many cases it's useful to start Platformatic applications using an API +instead of the command line. The @platformatic/runtime API makes it simple to +work with different application types (e.g. service, db, composer and runtime) without +needing to know the application type a priori.

buildServer()

The buildServer function creates a server from a provided configuration +object or configuration filename. +The config can be of either Platformatic Service, Platformatic DB, +Platformatic Composer or any other application built on top of +Platformatic Service.

import { buildServer } from '@platformatic/runtime'

const app = await buildServer('path/to/platformatic.runtime.json')
const entrypointUrl = await app.start()

// Make a request to the entrypoint.
const res = await fetch(entrypointUrl)
console.log(await res.json())

// Do other interesting things.

await app.close()

It is also possible to customize the configuration:

import { buildServer } from '@platformatic/runtime'

const config = {
// $schema: 'https://platformatic.dev/schemas/v0.39.0/runtime',
// $schema: 'https://platformatic.dev/schemas/v0.39.0/service',
// $schema: 'https://platformatic.dev/schemas/v0.39.0/db',
// $schema: 'https://platformatic.dev/schemas/v0.39.0/composer'
...
}
const app = await buildServer(config)

await app.start()

loadConfig()

The loadConfig function is used to read and parse a configuration file for +an arbitrary Platformatic application.

import { loadConfig } from '@platformatic/runtime'

// Read the config based on command line arguments. loadConfig() will detect
// the application type.
const config = await loadConfig({}, ['-c', '/path/to/platformatic.config.json'])

// Read the config based on command line arguments. The application type can
// be provided explicitly.
const config = await loadConfig(
{},
['-c', '/path/to/platformatic.config.json']
)

// Default config can be specified.
const config = await loadConfig(
{},
['-c', '/path/to/platformatic.config.json'],
{ key: 'value' }
)

start()

The start function loads a configuration, builds a server, and starts the +server. However, the server is not returned.

import { start } from '@platformatic/runtime'

await start(['-c', '/path/to/platformatic.config.json'])

startCommand()

The startCommand function is similar to start. However, if an exception +occurs, startCommand logs the error and exits the process. This is different +from start, which throws the exception.

import { startCommand } from '@platformatic/runtime'

await startCommand(['-c', '/path/to/platformatic.config.json'])
+ + + + \ No newline at end of file diff --git a/docs/next/reference/service/configuration/index.html b/docs/next/reference/service/configuration/index.html new file mode 100644 index 00000000000..2a0a50589be --- /dev/null +++ b/docs/next/reference/service/configuration/index.html @@ -0,0 +1,38 @@ + + + + + +Configuration | Platformatic Open Source Software + + + + + +
+
Version: Next

Configuration

Platformatic Service is configured with a configuration file. It supports the use +of environment variables as setting values with configuration placeholders.

Configuration file

If the Platformatic CLI finds a file in the current working directory matching +one of these filenames, it will automatically load it:

  • platformatic.service.json
  • platformatic.service.json5
  • platformatic.service.yml or platformatic.service.yaml
  • platformatic.service.tml or platformatic.service.toml

Alternatively, a --config option with a configuration +filepath can be passed to most platformatic service CLI commands.

The configuration examples in this reference use JSON.

Supported formats

FormatExtensions
JSON.json
JSON5.json5
YAML.yml, .yaml
TOML.tml, .toml

Comments are supported by the JSON5, YAML and TOML file formats.

Settings

Configuration settings are organised into the following groups:

Sensitive configuration settings, such as a database connection URL that contains +a password, should be set using configuration placeholders.

server

A required object with the following settings:

  • hostname (required, string) — Hostname where Platformatic Service server will listen for connections.

  • port (required, number) — Port where Platformatic Service server will listen for connections.

  • healthCheck (boolean or object) — Enables the health check endpoint.

    • Powered by @fastify/under-pressure.
    • The value can be an object, used to specify the interval between checks in milliseconds (default: 5000)

    Example

    {
    "server": {
    ...
    "healthCheck": {
    "interval": 2000
    }
    }
    }
  • cors (object) — Configuration for Cross-Origin Resource Sharing (CORS) headers.

    • All options will be passed to the @fastify/cors plugin. In order to specify a RegExp object, you can pass { regexp: 'yourregexp' }, +it will be automatically converted.
  • logger (object) -- the logger configuration.

  • pluginTimeout (integer) -- the number of milliseconds to wait for a Fastify plugin to load, see the fastify docs for more details.

  • https (object) - Configuration for HTTPS supporting the following options.

    • key (required, string, object, or array) - If key is a string, it specifies the private key to be used. If key is an object, it must have a path property specifying the private key file. Multiple keys are supported by passing an array of keys.
    • cert (required, string, object, or array) - If cert is a string, it specifies the certificate to be used. If cert is an object, it must have a path property specifying the certificate file. Multiple certificates are supported by passing an array of keys.

metrics

Configuration for a Prometheus server that will export monitoring metrics +for the current server instance. It uses fastify-metrics +under the hood.

This setting can be a boolean or an object. If set to true the Prometheus server will listen on http://0.0.0.0:9090.

Supported object properties:

  • hostname (string) — The hostname where Prometheus server will listen for connections.
  • port (number) — The port where Prometheus server will listen for connections.
  • auth (object) — Basic Auth configuration. username and password are required here +(use environment variables).

plugins

An optional object that defines the plugins loaded by Platformatic Service.

  • paths (required, array): an array of paths (string) +or an array of objects composed as follows,
    • path (string): Relative path to plugin's entry point.
    • options (object): Optional plugin options.
    • encapsulate (boolean): if the path is a folder, it instructs Platformatic to not encapsulate those plugins.
    • maxDepth (integer): if the path is a folder, it limits the depth to load the content from.
  • typescript (boolean or object): enable TypeScript compilation. A tsconfig.json file is required in the same folder.

Example

{
"plugins": {
"paths": [{
"path": "./my-plugin.js",
"options": {
"foo": "bar"
}
}]
}
}

typescript compilation options

The typescript can also be an object to customize the compilation. Here are the supported options:

  • enabled (boolean): enables compilation
  • tsConfig (string): path to the tsconfig.json file relative to the configuration
  • outDir (string): the output directory of tsconfig.json, in case tsconfig.json is not available +and enabled is set to false (production build)
  • flags (array of string): flags to be passed to tsc. Overrides tsConfig. +

Example:

{
"plugins": {
"paths": [{
"path": "./my-plugin.js",
"options": {
"foo": "bar"
}
}],
"typescript": {
"enabled": false,
"tsConfig": "./path/to/tsconfig.json",
"outDir": "dist"
}
}
}

watch

Disable watching for file changes if set to false. It can also be customized with the following options:

  • ignore (string[], default: null): List of glob patterns to ignore when watching for changes. If null or not specified, ignore rule is not applied. Ignore option doesn't work for typescript files.

  • allow (string[], default: ['*.js', '**/*.js']): List of glob patterns to allow when watching for changes. If null or not specified, allow rule is not applied. Allow option doesn't work for typescript files.

    Example

    {
    "watch": {
    "ignore": ["*.mjs", "**/*.mjs"],
    "allow": ["my-plugin.js", "plugins/*.js"]
    }
    }

service

Configure @platformatic/service specific settings such as graphql or openapi:

  • graphql (boolean or object, default: false) — Controls the GraphQL API interface, with optional GraphiQL UI.

    Examples

    Enables GraphQL support

    {
    "service": {
    "graphql": true
    }
    }

    Enables GraphQL support with GraphiQL

    {
    "service": {
    "graphql": {
    "graphiql": true
    }
    }
    }
  • openapi (boolean or object, default: false) — Enables OpenAPI REST support.

    • If value is an object, all OpenAPI v3 allowed properties can be passed. Also a prefix property can be passed to set the OpenAPI prefix.
    • Platformatic Service uses @fastify/swagger under the hood to manage this configuration.

    Examples

    Enables OpenAPI

    {
    "service": {
    ...
    "openapi": true
    }
    }

    Enables OpenAPI with prefix

    {
    "service": {
    "openapi": {
    "prefix": "/api"
    }
    }
    }

    Enables OpenAPI with options

    {
    "service": {
    "openapi": {
    "info": {
    "title": "Platformatic Service",
    "description": "Exposing a SQL database as REST"
    }
    }
    }
    }

telemetry

Open Telemetry is optionally supported with these settings:

  • serviceName (required, string) — Name of the service as will be reported in open telemetry.
  • version (string) — Optional version (free form)
  • skip (array). Optional list of operations to skip when exporting telemetry, each defined as an object with properties:
    • method: GET, POST, PUT, DELETE, PATCH, HEAD, OPTIONS, TRACE
    • path. e.g.: /documentation/json
  • exporter (object or array) — Exporter configuration. If not defined, the exporter defaults to console. If an array of objects is configured, every object must be a valid exporter object. The exporter object has the following properties:
    • type (string) — Exporter type. Supported values are console, otlp, zipkin and memory (default: console). memory is only supported for testing purposes.
    • options (object) — These options are supported:
      • url (string) — The URL to send the telemetry to. Required for otlp exporter. This has no effect on console and memory exporters.
      • headers (object) — Optional headers to send with the telemetry. This has no effect on console and memory exporters.

Note that OTLP traces can be consumed by different solutions, like Jaeger. Here the full list.

Example

{
"telemetry": {
"serviceName": "test-service",
"exporter": {
"type": "otlp",
"options": {
"url": "http://localhost:4318/v1/traces"
}
}
}
}

Environment variable placeholders

The value for any configuration setting can be replaced with an environment variable +by adding a placeholder in the configuration file, for example {PLT_SERVER_LOGGER_LEVEL}.

All placeholders in a configuration must be available as an environment variable +and must meet the allowed placeholder name rules.

Example

platformatic.service.json
{
"server": {
"port": "{PORT}"
}
}

Platformatic will replace the placeholders in this example with the environment +variables of the same name.

Setting environment variables

If a .env file exists it will automatically be loaded by Platformatic using +dotenv. For example:

.env
PLT_SERVER_LOGGER_LEVEL=info
PORT=8080

The .env file must be located in the same folder as the Platformatic configuration +file or in the current working directory.

Environment variables can also be set directly on the command line, for example:

PLT_SERVER_LOGGER_LEVEL=debug npx platformatic service

Allowed placeholder names

Only placeholder names prefixed with PLT_, or that are in this allow list, will be +dynamically replaced in the configuration file:

  • PORT
  • DATABASE_URL

This restriction is to avoid accidentally exposing system environment variables. +An error will be raised by Platformatic if it finds a configuration placeholder +that isn't allowed.

The default allow list can be extended by passing a --allow-env CLI option with a +comma separated list of strings, for example:

npx platformatic service --allow-env=HOST,SERVER_LOGGER_LEVEL

If --allow-env is passed as an option to the CLI, it will be merged with the +default allow list.

+ + + + \ No newline at end of file diff --git a/docs/next/reference/service/introduction/index.html b/docs/next/reference/service/introduction/index.html new file mode 100644 index 00000000000..603e448affb --- /dev/null +++ b/docs/next/reference/service/introduction/index.html @@ -0,0 +1,21 @@ + + + + + +Platformatic Service | Platformatic Open Source Software + + + + + +
+
Version: Next

Platformatic Service

Platformatic Service is an HTTP server that provides developer tools for +building robust APIs with Node.js.

For a high level overview of how Platformatic DB works, please reference the +Architecture guide.

info

Platformatic Service is currently in public beta.

Features

Public beta

Platformatic Service is in public beta. You can use it in production, but it's quite +likely that you'll encounter significant bugs.

If you run into a bug or have a suggestion for improvement, please +raise an issue on GitHub.

Standalone usage

If you're only interested in the features available in Platformatic Service, you can simply switch platformatic with @platformatic/service in the dependencies of your package.json, so that you'll only import fewer deps.

You can use the plt-service command, it's the equivalent of plt service.

+ + + + \ No newline at end of file diff --git a/docs/next/reference/service/plugin/index.html b/docs/next/reference/service/plugin/index.html new file mode 100644 index 00000000000..08b701e6052 --- /dev/null +++ b/docs/next/reference/service/plugin/index.html @@ -0,0 +1,21 @@ + + + + + +Plugin | Platformatic Open Source Software + + + + + +
+
Version: Next

Plugin

If you want to add features to a service, you will need to register a plugin, which will be in the form of a standard Fastify plugin.

The config file will specify where the plugin file is located as the example below:

{
...
"plugins": {
"paths": ["./plugin/index.js"]
}
}

The path is relative to the config file path.

You should export an async function which receives a parameters

  • app (FastifyInstance) that is the main fastify instance
  • opts all the options specified in the config file after path

Hot Reload

Plugin file is being watched by fs.watch function.

You don't need to reload Platformatic Service server while working on your plugin. Every time you save, the watcher will trigger a reload event and the server will auto-restart and load your updated code.

tip

At this time, on Linux, file watch in subdirectories is not supported due to a Node.js limitation (documented here).

Directories

The path can also be a directory. In that case, the directory will be loaded with @fastify/autoload.

Consider the following directory structure:

├── routes
│ ├── foo
│ │ ├── something.js
│ │ └── bar
│ │ └── baz.js
│ ├── single-plugin
│ │ └── utils.js
│ └── another-plugin.js
└── platformatic.service.json

By default the folder will be added as a prefix to all the routes defined within them. +See the autoload documentation for all the options to customize this behavior.

Multiple plugins

Multiple plugins can be loaded in parallel by specifying an array:

{
...
"plugins": {
"paths": [{
"path": "./plugin/index.js"
}, {
"path": "./routes/"
}]
}
}

TypeScript and Autocompletion

In order to provide the correct typings of the features added by Platformatic Service to your Fastify instance, +add the following at the top of your files:

/// <reference types="@platformatic/service" />

Plugin definition with TypeScript

Here is an example of writing a plugin in TypeScript:

/// <reference types="@platformatic/service" />
import { FastifyInstance, FastifyPluginOptions } from 'fastify'

export default async function (fastify: FastifyInstance, opts: FastifyPluginOptions) {
}

Note that you need to add the "typescript": true configuration to your platformatic.service.json.

Loading compiled files

Setting "typescript": false but including a tsconfig.json with an outDir +option, will instruct Platformatic Service to try loading your plugins from that folder instead. +This setup is needed to support pre-compiled sources to reduce cold start time during deployment.

+ + + + \ No newline at end of file diff --git a/docs/next/reference/service/programmatic/index.html b/docs/next/reference/service/programmatic/index.html new file mode 100644 index 00000000000..b0eb1b2b990 --- /dev/null +++ b/docs/next/reference/service/programmatic/index.html @@ -0,0 +1,19 @@ + + + + + +Programmatic API | Platformatic Open Source Software + + + + + +
+
Version: Next

Programmatic API

In many cases it's useful to start Platformatic Service using an API instead of +command line, e.g. in tests we want to start and stop our server.

The buildServer function allows that:

import { buildServer } from '@platformatic/service'

const app = await buildServer('path/to/platformatic.service.json')

await app.start()

const res = await fetch(app.url)
console.log(await res.json())

// do something

await app.close()

It is also possible to customize the configuration:

import { buildServer } from '@platformatic/service'

const app = await buildServer({
server: {
hostname: '127.0.0.1',
port: 0
}
})

await app.start()

const res = await fetch(app.url)
console.log(await res.json())

// do something

await app.close()

Creating a reusable application on top of Platformatic Service

Platformatic DB is built on top of Platformatic Service. +If you want to build a similar kind of tool, follow this example:

import { buildServer, schema, platformaticService } from '@platformatic/service'

async function myPlugin (app, opts) {
// app.platformatic.configManager contains an instance of the ConfigManager
console.log(app.platformatic.configManager.current)

await platformaticService(app, opts)
}

// break Fastify encapsulation
myPlugin[Symbol.for('skip-override')] = true
myPlugin.configType = 'myPlugin'

// This is the schema for this reusable application configuration file,
// customize at will but retain the base properties of the schema from
// @platformatic/service
myPlugin.schema = schema

// The configuration of the ConfigManager
myPlugin.configManagerConfig = {
schema: myPlugin.schema,
envWhitelist: ['PORT', 'HOSTNAME'],
allowToWatch: ['.env'],
schemaOptions: {
useDefaults: true,
coerceTypes: true,
allErrors: true,
strict: false
},
async transformConfig () {
console.log(this.current) // this is the current config

// In this method you can alter the configuration before the application
// is started. It's useful to apply some defaults that cannot be derived
// inside the schema, such as resolving paths.
}
}


const server = await buildServer('path/to/config.json', myPlugin)

await server.start()

const res = await fetch(server.listeningOrigin)
console.log(await res.json())

// do something

await server.close()
+ + + + \ No newline at end of file diff --git a/docs/next/reference/sql-events/fastify-plugin/index.html b/docs/next/reference/sql-events/fastify-plugin/index.html new file mode 100644 index 00000000000..08f5b2f431c --- /dev/null +++ b/docs/next/reference/sql-events/fastify-plugin/index.html @@ -0,0 +1,19 @@ + + + + + +Fastify Plugin | Platformatic Open Source Software + + + + + +
+
Version: Next

Fastify Plugin

The @platformatic/sql-events package exports a Fastify plugin that can be used out of the box in a server application. +It requires that @platformatic/sql-mapper is registered before it.

The plugin has the following options:

The plugin adds the following properties to the app.platformatic object:

  • mq — an instance of mqemitter
  • subscribe(topics) — a method to create a node Readable +that will contain the events emitted by those topics.

Each entity of app.platformatic.entities will be augmented with two functions:

  • entity.getPublishTopic({ ctx, data, action })
  • entity.getSubscriptionTopic({ ctx, action })

Where ctx is the GraphQL Context, data is the object that will be emitted and action is either save or delete.

Usage

'use strict'

const Fastify = require('fastify')
const mapper = require('@platformatic/sql-mapper')
const events = require('@platformatic/sql-events')

async function main() {
const app = Fastify({
logger: {
level: 'info'
}
})
app.register(mapper.plugin, {
connectionString: 'postgres://postgres:postgres@127.0.0.1/postgres'
})

app.register(events)

// setup your routes


await app.listen({ port: 3333 })
}

main()
+ + + + \ No newline at end of file diff --git a/docs/next/reference/sql-events/introduction/index.html b/docs/next/reference/sql-events/introduction/index.html new file mode 100644 index 00000000000..01970085f5e --- /dev/null +++ b/docs/next/reference/sql-events/introduction/index.html @@ -0,0 +1,21 @@ + + + + + +Introduction to the sql-events module | Platformatic Open Source Software + + + + + +
+
Version: Next

Introduction to the sql-events module

The Platformatic DB sql-events uses mqemitter to publish events when entities are saved and deleted.

These events are useful to distribute updates to clients, e.g. via WebSocket, Server-Sent Events, or GraphQL Subscriptions. +When subscribing and using a multi-process system with a broker like Redis, a subscribed topic will receive the data from all +the other processes.

They are not the right choice for executing some code whenever an entity is created, modified or deleted, in that case +use @platformatic/sql-mapper hooks.

Install

You can use it together with @platformatic/sql-mapper.

npm i @platformatic/sql-mapper @platformatic/sql-events

Usage

const { connect } = require('@platformatic/sql-mapper')
const { setupEmitter } = require('@platformatic/sql-events')
const { pino } = require('pino')

const log = pino()

async function onDatabaseLoad (db, sql) {
await db.query(sql`CREATE TABLE pages (
id SERIAL PRIMARY KEY,
title VARCHAR(255) NOT NULL
);`)
}
const connectionString = 'postgres://postgres:postgres@localhost:5432/postgres'
const mapper = await connect({
connectionString,
log,
onDatabaseLoad,
ignore: {},
hooks: {
Page: {
find: async function(_find, opts) {
console.log('hook called');
return await _find(opts)
}
}
}
})

setupEmitter({ mapper, log })

const pageEntity = mapper.entities.page

const queue = await mapper.subscribe([
pageEntity.getSubscriptionTopic({ action: 'save' }),
pageEntity.getSubscriptionTopic({ action: 'delete' })
])

const page = await pageEntity.save({
input: { title: 'fourth page' }
})

const page2 = await pageEntity.save({
input: {
id: page.id,
title: 'fifth page'
}
})

await pageEntity.delete({
where: {
id: {
eq: page.id
}
},
fields: ['id', 'title']
})

for await (const ev of queue) {
console.log(ev)
if (expected.length === 0) {
break
}
}

process.exit(0)

API

The setupEmitter function has the following options:

The setupEmitter functions adds the following properties to the mapper object:

  • mq — an instance of mqemitter
  • subscribe(topics) — a method to create a node Readable +that will contain the events emitted by those topics.

Each entity of app.platformatic.entities will be augmented with two functions:

  • entity.getPublishTopic({ ctx, data, action })
  • entity.getSubscriptionTopic({ ctx, action })

Where ctx is the GraphQL Context, data is the object that will be emitted and action is either save or delete.

+ + + + \ No newline at end of file diff --git a/docs/next/reference/sql-graphql/ignore/index.html b/docs/next/reference/sql-graphql/ignore/index.html new file mode 100644 index 00000000000..e94890eb8cf --- /dev/null +++ b/docs/next/reference/sql-graphql/ignore/index.html @@ -0,0 +1,17 @@ + + + + + +Ignoring types and fields | Platformatic Open Source Software + + + + + + + + + + \ No newline at end of file diff --git a/docs/next/reference/sql-graphql/introduction/index.html b/docs/next/reference/sql-graphql/introduction/index.html new file mode 100644 index 00000000000..6b6c740431c --- /dev/null +++ b/docs/next/reference/sql-graphql/introduction/index.html @@ -0,0 +1,21 @@ + + + + + +Introduction to the GraphQL API | Platformatic Open Source Software + + + + + +
+
Version: Next

Introduction to the GraphQL API

The Platformatic DB GraphQL plugin starts a GraphQL server and makes it available +via a /graphql endpoint. This endpoint is automatically ready to run queries and +mutations against your entities. This functionality is powered by +Mercurius.

GraphiQL

The GraphiQL web UI is integrated into +Platformatic DB. To enable it you can pass an option to the sql-graphql plugin:

app.register(graphqlPlugin, { graphiql: true })

The GraphiQL interface is made available under the /graphiql path.

+ + + + \ No newline at end of file diff --git a/docs/next/reference/sql-graphql/many-to-many/index.html b/docs/next/reference/sql-graphql/many-to-many/index.html new file mode 100644 index 00000000000..d6271c49d49 --- /dev/null +++ b/docs/next/reference/sql-graphql/many-to-many/index.html @@ -0,0 +1,20 @@ + + + + + +Many To Many Relationship | Platformatic Open Source Software + + + + + +
+
Version: Next

Many To Many Relationship

Many-to-Many relationship lets you relate each row in one table to many rows in +another table and vice versa.

Many-to-many relationship are implemented in SQL via a "join table", a table whose primary key +is composed by the identifier of the two parts of the many-to-many relationship.

Platformatic DB fully supports many-to-many relationships on all supported databases.

Example

Consider the following schema (SQLite):

CREATE TABLE pages (
id INTEGER PRIMARY KEY,
the_title VARCHAR(42)
);

CREATE TABLE users (
id INTEGER PRIMARY KEY,
username VARCHAR(255) NOT NULL
);

CREATE TABLE editors (
page_id INTEGER NOT NULL,
user_id INTEGER NOT NULL,
role VARCHAR(255) NOT NULL,
CONSTRAINT fk_editor_pages FOREIGN KEY (page_id) REFERENCES pages(id),
CONSTRAINT fk_editor_users FOREIGN KEY (user_id) REFERENCES users(id),
PRIMARY KEY (page_id, user_id)
);

The table editors is a "join table" between users and pages. +Given this schema, you could issue queries like:

query {
editors(orderBy: { field: role, direction: DESC }) {
user {
id
username
}
page {
id
theTitle
}
role
}
}

Mutation works exactly the same as before:

mutation {
saveEditor(input: { userId: "1", pageId: "1", role: "captain" }) {
user {
id
username
}
page {
id
theTitle
}
role
}
}
+ + + + \ No newline at end of file diff --git a/docs/next/reference/sql-graphql/mutations/index.html b/docs/next/reference/sql-graphql/mutations/index.html new file mode 100644 index 00000000000..02f1723210f --- /dev/null +++ b/docs/next/reference/sql-graphql/mutations/index.html @@ -0,0 +1,20 @@ + + + + + +Mutations | Platformatic Open Source Software + + + + + +
+
Version: Next

Mutations

When the GraphQL plugin is loaded, some mutations are automatically added to +the GraphQL schema.

save[ENTITY]

Saves a new entity to the database or updates an existing entity. +This actually behaves as an upsert, allowing both behaviours depending on the presence of the primary key field.

Example

'use strict'

const Fastify = require('fastify')
const graphqlPlugin = require('@platformatic/sql-graphql')
const sqlMapper = require('@platformatic/sql-mapper')

async function main() {
const app = Fastify({
logger: {
level: 'info'
}
})
app.register(sqlMapper, {
connectionString: 'postgres://postgres:postgres@127.0.0.1/postgres',
log: logger,
})
app.register(graphqlPlugin, {
graphiql: true
})
const res = await app.inject({
method: 'POST',
url: '/graphql',
body: {
query: `
mutation {
savePage(input: { id: 3 title: "Platformatic is cool!" }) {
id
title
}
}
`
}
})
const result = await res.json()
console.log(result.data) // { savePage: { id: '3', title: 'Platformatic is cool!' } }
await app.close()
}

main()

insert[ENTITY]

Inserts a new entity in the database.

Example

'use strict'

const Fastify = require('fastify')
const graphqlPlugin = require('@platformatic/sql-graphql')
const sqlMapper = require('@platformatic/sql-mapper')

async function main() {
const app = Fastify({
logger: {
level: 'info'
}
})
app.register(sqlMapper, {
connectionString: 'postgres://postgres:postgres@127.0.0.1/postgres',
log: logger,
})
app.register(graphqlPlugin, {
graphiql: true
})
const res = await app.inject({
method: 'POST',
url: '/graphql',
body: {
query: `
mutation {
savePage(input: { title: "Platformatic is cool!" }) {
id
title
}
}
`
}
})
const result = await res.json()
console.log(result.data) // { savePage: { id: '4', title: 'Platformatic is cool!' } }
await app.close()
}

main()

delete[ENTITIES]

Deletes one or more entities from the database, based on the where clause +passed as an input to the mutation.

Example

'use strict'

const Fastify = require('fastify')
const graphqlPlugin = require('@platformatic/sql-graphql')
const sqlMapper = require('@platformatic/sql-mapper')

async function main() {
const app = Fastify({
logger: {
level: 'info'
}
})
app.register(sqlMapper, {
connectionString: 'postgres://postgres:postgres@127.0.0.1/postgres',
log: logger,
})
app.register(graphqlPlugin, {
graphiql: true
})
const res = await app.inject({
method: 'POST',
url: '/graphql',
body: {
query: `
mutation {
deletePages(where: { id: { eq: "3" } }) {
id
title
}
}
`
}
})
const result = await res.json()
console.log(result.data) // { deletePages: [ { id: '3', title: 'Platformatic is cool!' } ] }
await app.close()
}

main()
+ + + + \ No newline at end of file diff --git a/docs/next/reference/sql-graphql/queries/index.html b/docs/next/reference/sql-graphql/queries/index.html new file mode 100644 index 00000000000..bf1a5637432 --- /dev/null +++ b/docs/next/reference/sql-graphql/queries/index.html @@ -0,0 +1,21 @@ + + + + + +Queries | Platformatic Open Source Software + + + + + +
+
Version: Next

Queries

A GraphQL query is automatically added to the GraphQL schema for each database +table, along with a complete mapping for all table fields.

Example

'use strict'

const Fastify = require('fastify')
const graphqlPlugin = require('@platformatic/sql-graphql')
const sqlMapper = require('@platformatic/sql-mapper')
async function main() {
const app = Fastify({
logger: {
level: 'info'
}
})
app.register(sqlMapper, {
connectionString: 'postgres://postgres:postgres@127.0.0.1/postgres'
})
app.register(graphqlPlugin, {
graphiql: true
})
const res = await app.inject({
method: 'POST',
url: '/graphql',
body: {
query: `
query{
pages{
id,
title
}
}
`
}
})
const result = await res.json()
console.log(result.data)
await app.close()
}
main()

Advanced Queries

The following additional queries are added to the GraphQL schema for each entity:

get[ENTITY]by[PRIMARY_KEY]

If you have a table pages with the field id as the primary key, you can run +a query called getPageById.

Example

...
const res = await app.inject({
method: 'POST',
url: '/graphql',
body: {
query: `
query{
getPageById(id: 3) {
id,
title
}
}
`
}
})
const result = await res.json()
console.log(result.data) // { getPageById: { id: '3', title: 'A fiction' } }

count[ENTITIES]

...
const res = await app.inject({
method: 'POST',
url: '/graphql',
body: {
query: `
query {
countPages {
total
}
}
`
}
})
const result = await res.json()
console.log(result.data) // { countPages: { total: 17 } }

Pagination

The Platformatic DB supports for result's pagination through input parameters: limit and offset

Example

{
users(limit:5, offset: 10) {
name
}
}

It returns 5 users starting from position 10.

Limit

By default a limit value (10) is applied to each request.

Clients can override this behavior by passing a value. +In this case the server validates the input and an error is returned if it exceeds the max accepted value (100).

Limit's values can be customized through configuration:

{
...
"db": {
...
"limit": {
"default": 50,
"max": 1000
}
}
}

Limit only accepts values >= 0. Otherwise an error is returned.

Offset

By default offset is not applied to the request. +Clients can override this behavior by passing a value.

Offset only accepts values >= 0. Otherwise an error is return.

+ + + + \ No newline at end of file diff --git a/docs/next/reference/sql-graphql/subscriptions/index.html b/docs/next/reference/sql-graphql/subscriptions/index.html new file mode 100644 index 00000000000..3a3565742ef --- /dev/null +++ b/docs/next/reference/sql-graphql/subscriptions/index.html @@ -0,0 +1,19 @@ + + + + + +Subscription | Platformatic Open Source Software + + + + + +
+
Version: Next

Subscription

When the GraphQL plugin is loaded, some subscriptions are automatically added to +the GraphQL schema if the @platformatic/sql-events plugin has been previously registered.

It's possible to avoid creating the subscriptions for a given entity by adding the subscriptionIgnore config, +like so: subscriptionIgnore: ['page'].

[ENTITY]Saved

Published whenever an entity is saved, e.g. when the mutation insert[ENTITY] or save[ENTITY] are called.

[ENTITY]Deleted

Published whenever an entity is deleted, e.g. when the mutation delete[ENTITY] is called.

+ + + + \ No newline at end of file diff --git a/docs/next/reference/sql-mapper/entities/api/index.html b/docs/next/reference/sql-mapper/entities/api/index.html new file mode 100644 index 00000000000..4cf297d8436 --- /dev/null +++ b/docs/next/reference/sql-mapper/entities/api/index.html @@ -0,0 +1,18 @@ + + + + + +API | Platformatic Open Source Software + + + + + +
+
Version: Next

API

A set of operation methods are available on each entity:

Returned fields

The entity operation methods accept a fields option that can specify an array of field names to be returned. If not specified, all fields will be returned.

Where clause

The entity operation methods accept a where option to allow limiting of the database rows that will be affected by the operation.

The where object's key is the field you want to check, the value is a key/value map where the key is an operator (see the table below) and the value is the value you want to run the operator against.

Platformatic operatorSQL operator
eq'='
in'IN'
nin'NOT IN'
neq'<>'
gt'>'
gte'>='
lt'<'
lte'<='
like'LIKE'

Examples

Selects row with id = 1

{
...
"where": {
id: {
eq: 1
}
}
}

Select all rows with id less than 100

{
...
"where": {
id: {
lt: 100
}
}
}

Select all rows with id 1, 3, 5 or 7

{
...
"where": {
id: {
in: [1, 3, 5, 7]
}
}
}

Where clause operations are by default combined with the AND operator. To combine them with the OR operator, use the or key.

Select all rows with id 1 or 3

{
...
"where": {
or: [
{
id: {
eq: 1
}
},
{
id: {
eq: 3
}
}
]
}
}

Select all rows with id 1 or 3 and title like 'foo%'

{
...
"where": {
or: [
{
id: {
eq: 1
}
},
{
id: {
eq: 3
}
}
],
title: {
like: 'foo%'
}
}
}

Reference

find

Retrieve data for an entity from the database.

Options

NameTypeDescription
fieldsArray of stringList of fields to be returned for each object
whereObjectWhere clause 🔗
orderByArray of ObjectObject like { field: 'counter', direction: 'ASC' }
limitNumberLimits the number of returned elements
offsetNumberThe offset to start looking for rows from

Usage

'use strict'

const { connect } = require('@platformatic/sql-mapper')
const { pino } = require('pino')
const pretty = require('pino-pretty')
const logger = pino(pretty())

async function main() {
const pgConnectionString = 'postgres://postgres:postgres@127.0.0.1/postgres'
const mapper = await connect({
connectionString: pgConnectionString,
log: logger,
})
const res = await mapper.entities.page.find({
fields: ['id', 'title',],
where: {
id: {
lt: 10
}
},
})
logger.info(res)
await mapper.db.dispose()
}
main()

count

Same as find, but only count entities.

Options

NameTypeDescription
whereObjectWhere clause 🔗

Usage

'use strict'

const { connect } = require('@platformatic/sql-mapper')
const { pino } = require('pino')
const pretty = require('pino-pretty')
const logger = pino(pretty())

async function main() {
const pgConnectionString = 'postgres://postgres:postgres@127.0.0.1/postgres'
const mapper = await connect({
connectionString: pgConnectionString,
log: logger,
})
const res = await mapper.entities.page.count({
where: {
id: {
lt: 10
}
},
})
logger.info(res)
await mapper.db.dispose()
}
main()

insert

Insert one or more entity rows in the database.

Options

NameTypeDescription
fieldsArray of stringList of fields to be returned for each object
inputsArray of ObjectEach object is a new row

Usage

'use strict'

const { connect } = require('@platformatic/sql-mapper')
const { pino } = require('pino')
const pretty = require('pino-pretty')
const logger = pino(pretty())

async function main() {
const pgConnectionString = 'postgres://postgres:postgres@127.0.0.1/postgres'
const mapper = await connect({
connectionString: pgConnectionString,
log: logger,
})
const res = await mapper.entities.page.insert({
fields: ['id', 'title' ],
inputs: [
{ title: 'Foobar' },
{ title: 'FizzBuzz' }
],
})
logger.info(res)
/**
0: {
"id": "16",
"title": "Foobar"
}
1: {
"id": "17",
"title": "FizzBuzz"
}
*/
await mapper.db.dispose()
}
main()

save

Create a new entity row in the database or update an existing one.

To update an existing entity, the id field (or equivalent primary key) must be included in the input object. save actually behaves as an upsert, allowing both behaviours depending on the presence of the primary key field.

Options

NameTypeDescription
fieldsArray of stringList of fields to be returned for each object
inputObjectThe single row to create/update

Usage

'use strict'
const { connect } = require('@platformatic/sql-mapper')
const { pino } = require('pino')
const pretty = require('pino-pretty')
const logger = pino(pretty())

async function main() {
const connectionString = 'postgres://postgres:postgres@127.0.0.1/postgres'
const mapper = await connect({
connectionString: connectionString,
log: logger,
})
const res = await mapper.entities.page.save({
fields: ['id', 'title' ],
input: { id: 1, title: 'FizzBuzz' },
})
logger.info(res)
await mapper.db.dispose()
}
main()

delete

Delete one or more entity rows from the database, depending on the where option. Returns the data for all deleted objects.

Options

NameTypeDescription
fieldsArray of stringList of fields to be returned for each object
whereObjectWhere clause 🔗

Usage

'use strict'
const { connect } = require('@platformatic/sql-mapper')
const { pino } = require('pino')
const pretty = require('pino-pretty')
const logger = pino(pretty())

async function main() {
const connectionString = 'postgres://postgres:postgres@127.0.0.1/postgres'
const mapper = await connect({
connectionString: connectionString,
log: logger,
})
const res = await mapper.entities.page.delete({
fields: ['id', 'title',],
where: {
id: {
lt: 4
}
},
})
logger.info(res)
await mapper.db.dispose()
}
main()

updateMany

Update one or more entity rows from the database, depending on the where option. Returns the data for all updated objects.

Options

NameTypeDescription
whereObjectWhere clause 🔗
inputObjectThe new values that you want to update
fieldsArray of stringList of fields to be returned for each object

Usage

'use strict'
const { connect } = require('@platformatic/sql-mapper')
const { pino } = require('pino')
const pretty = require('pino-pretty')
const logger = pino(pretty())

async function main() {
const connectionString = 'postgres://postgres:postgres@127.0.0.1/postgres'
const mapper = await connect({
connectionString: connectionString,
log: logger,
})
const res = await mapper.entities.page.updateMany({
fields: ['id', 'title',],
where: {
counter: {
gte: 30
}
},
input: {
title: 'Updated title'
}
})
logger.info(res)
await mapper.db.dispose()
}
main()

+ + + + \ No newline at end of file diff --git a/docs/next/reference/sql-mapper/entities/example/index.html b/docs/next/reference/sql-mapper/entities/example/index.html new file mode 100644 index 00000000000..808ecf8b125 --- /dev/null +++ b/docs/next/reference/sql-mapper/entities/example/index.html @@ -0,0 +1,17 @@ + + + + + +Example | Platformatic Open Source Software + + + + + +
+
Version: Next

Example

Given this PostgreSQL SQL schema:

CREATE TABLE "categories" (
"id" int4 NOT NULL DEFAULT nextval('categories_id_seq'::regclass),
"name" varchar(255) NOT NULL,
PRIMARY KEY ("id")
);

CREATE TABLE "pages" (
"id" int4 NOT NULL DEFAULT nextval('pages_id_seq'::regclass),
"title" varchar(255) NOT NULL,
"category_id" int4,
"user_id" int4,
PRIMARY KEY ("id")
);

ALTER TABLE "pages" ADD FOREIGN KEY ("category_id") REFERENCES "categories"("id");

app.platformatic.entities will contain this mapping object:

{
"category": {
"name": "Category",
"singularName": "category",
"pluralName": "categories",
"primaryKey": "id",
"table": "categories",
"fields": {
"id": {
"sqlType": "int4",
"isNullable": false,
"primaryKey": true,
"camelcase": "id"
},
"name": {
"sqlType": "varchar",
"isNullable": false,
"camelcase": "name"
}
},
"camelCasedFields": {
"id": {
"sqlType": "int4",
"isNullable": false,
"primaryKey": true,
"camelcase": "id"
},
"name": {
"sqlType": "varchar",
"isNullable": false,
"camelcase": "name"
}
},
"relations": [],
"reverseRelationships": [
{
"sourceEntity": "Page",
"relation": {
"constraint_catalog": "postgres",
"constraint_schema": "public",
"constraint_name": "pages_category_id_fkey",
"table_catalog": "postgres",
"table_schema": "public",
"table_name": "pages",
"constraint_type": "FOREIGN KEY",
"is_deferrable": "NO",
"initially_deferred": "NO",
"enforced": "YES",
"column_name": "category_id",
"ordinal_position": 1,
"position_in_unique_constraint": 1,
"foreign_table_name": "categories",
"foreign_column_name": "id"
}
}
]
},
"page": {
"name": "Page",
"singularName": "page",
"pluralName": "pages",
"primaryKey": "id",
"table": "pages",
"fields": {
"id": {
"sqlType": "int4",
"isNullable": false,
"primaryKey": true,
"camelcase": "id"
},
"title": {
"sqlType": "varchar",
"isNullable": false,
"camelcase": "title"
},
"category_id": {
"sqlType": "int4",
"isNullable": true,
"foreignKey": true,
"camelcase": "categoryId"
},
"user_id": {
"sqlType": "int4",
"isNullable": true,
"camelcase": "userId"
}
},
"camelCasedFields": {
"id": {
"sqlType": "int4",
"isNullable": false,
"primaryKey": true,
"camelcase": "id"
},
"title": {
"sqlType": "varchar",
"isNullable": false,
"camelcase": "title"
},
"categoryId": {
"sqlType": "int4",
"isNullable": true,
"foreignKey": true,
"camelcase": "categoryId"
},
"userId": {
"sqlType": "int4",
"isNullable": true,
"camelcase": "userId"
}
},
"relations": [
{
"constraint_catalog": "postgres",
"constraint_schema": "public",
"constraint_name": "pages_category_id_fkey",
"table_catalog": "postgres",
"table_schema": "public",
"table_name": "pages",
"constraint_type": "FOREIGN KEY",
"is_deferrable": "NO",
"initially_deferred": "NO",
"enforced": "YES",
"column_name": "category_id",
"ordinal_position": 1,
"position_in_unique_constraint": 1,
"foreign_table_name": "categories",
"foreign_column_name": "id"
}
],
"reverseRelationships": []
}
}
+ + + + \ No newline at end of file diff --git a/docs/next/reference/sql-mapper/entities/fields/index.html b/docs/next/reference/sql-mapper/entities/fields/index.html new file mode 100644 index 00000000000..f22d5378486 --- /dev/null +++ b/docs/next/reference/sql-mapper/entities/fields/index.html @@ -0,0 +1,17 @@ + + + + + +Fields | Platformatic Open Source Software + + + + + +
+
Version: Next

Fields

When Platformatic DB inspects a database's schema, it creates an object for each table that contains a mapping of their fields.

These objects contain the following properties:

  • singularName: singular entity name, based on table name. Uses inflected under the hood.
  • pluralName: plural entity name (i.e. 'pages')
  • primaryKey: the field which is identified as primary key.
  • table: original table name
  • fields: an object containing all fields details. Object key is the field name.
  • camelCasedFields: an object containing all fields details in camelcase. If you have a column named user_id you can access it using both userId or user_id

Fields detail

  • sqlType: The original field type. It may vary depending on the underlying DB Engine
  • isNullable: Whether the field can be null or not
  • primaryKey: Whether the field is the primary key or not
  • camelcase: The camelcased value of the field

Example

Given this SQL Schema (for PostgreSQL):

CREATE SEQUENCE IF NOT EXISTS pages_id_seq;
CREATE TABLE "public"."pages" (
"id" int4 NOT NULL DEFAULT nextval('pages_id_seq'::regclass),
"title" varchar,
"body_content" text,
"category_id" int4,
PRIMARY KEY ("id")
);

The resulting mapping object will be:

{
singularName: 'page',
pluralName: 'pages',
primaryKey: 'id',
table: 'pages',
fields: {
id: {
sqlType: 'int4',
isNullable: false,
primaryKey: true,
camelcase: 'id'
},
title: {
sqlType: 'varchar',
isNullable: true,
camelcase: 'title'
},
body_content: {
sqlType: 'text',
isNullable: true,
camelcase: 'bodyContent'
},
category_id: {
sqlType: 'int4',
isNullable: true,
foreignKey: true,
camelcase: 'categoryId'
}
}
camelCasedFields: {
id: {
sqlType: 'int4',
isNullable: false,
primaryKey: true,
camelcase: 'id'
},
title: {
sqlType: 'varchar',
isNullable: true,
camelcase: 'title'
},
bodyContent: {
sqlType: 'text',
isNullable: true,
camelcase: 'bodyContent'
},
categoryId: {
sqlType: 'int4',
isNullable: true,
foreignKey: true,
camelcase: 'categoryId'
}
},
relations: []
}
+ + + + \ No newline at end of file diff --git a/docs/next/reference/sql-mapper/entities/hooks/index.html b/docs/next/reference/sql-mapper/entities/hooks/index.html new file mode 100644 index 00000000000..d439701e618 --- /dev/null +++ b/docs/next/reference/sql-mapper/entities/hooks/index.html @@ -0,0 +1,17 @@ + + + + + +Hooks | Platformatic Open Source Software + + + + + +
+
Version: Next

Hooks

Entity hooks are a way to wrap the API methods for an entity and add custom behaviour.

The Platformatic DB SQL Mapper provides an addEntityHooks(entityName, spec) function that can be used to add hooks for an entity.

How to use hooks

addEntityHooks accepts two arguments:

  1. A string representing the entity name (singularized), for example 'page'.
  2. A key/value object where the key is one of the API methods (find, insert, save, delete) and the value is a callback function. The callback will be called with the original API method and the options that were passed to that method. See the example below.

Usage

'use strict'
const { connect } = require('@platformatic/sql-mapper')
const { pino } = require('pino')
const pretty = require('pino-pretty')
const logger = pino(pretty())

async function main() {
const pgConnectionString = 'postgres://postgres:postgres@127.0.0.1/postgres'
const mapper = await connect({
connectionString: pgConnectionString,
log: logger,
})
mapper.addEntityHooks('page', {
find: async (originalFind, opts) => {
// Add a `foo` field with `bar` value to each row
const res = await originalFind(opts)
return res.map((row) => {
row.foo = 'bar'
return row
})
}
})
const res = await mapper.entities.page.find({
fields: ['id', 'title',],
where: {
id: {
lt: 10
}
},
})
logger.info(res)
/**
[
0: {
"id": "5",
"title": "Page 1",
"foo": "bar"
},
1: {
"id": "6",
"title": "Page 2",
"foo": "bar"
}
]
*/
await mapper.db.dispose()
}
main()

Multiple Hooks

Multiple hooks can be added for the same entity and API method, for example:

'use strict'
const { connect } = require('@platformatic/sql-mapper')
const { pino } = require('pino')
const pretty = require('pino-pretty')
const logger = pino(pretty())

async function main() {
const pgConnectionString = 'postgres://postgres:postgres@127.0.0.1/postgres'
const mapper = await connect({
connectionString: pgConnectionString,
log: logger,
})
mapper.addEntityHooks('page', {
find: async function firstHook(previousFunction, opts) {
// Add a `foo` field with `bar` value to each row
const res = await previousFunction(opts)
return res.map((row) => {
row.foo = 'bar'
return row
})
}
})
mapper.addEntityHooks('page', {
find: async function secondHook(previousFunction, opts) {
// Add a `bar` field with `baz` value to each row
const res = await previousFunction(opts)
return res.map((row) => {
row.bar = 'baz'
return row
})
}
})
const res = await mapper.entities.page.find({
fields: ['id', 'title',],
where: {
id: {
lt: 10
}
},
})
logger.info(res)
/**
[
0: {
"id": "5",
"title": "Page 1",
"foo": "bar",
"bar": "baz"
},
1: {
"id": "6",
"title": "Page 2",
"foo": "bar",
"bar": "baz"
}
]
*/
await mapper.db.dispose()
}
main()

Since hooks are wrappers, they are being called in reverse order, like the image below

Hooks Lifecycle

So even though we defined two hooks, the Database will be hit only once.

Query result will be processed by firstHook, which will pass the result to secondHook, which will, finally, send the processed result to the original .find({...}) function.

+ + + + \ No newline at end of file diff --git a/docs/next/reference/sql-mapper/entities/introduction/index.html b/docs/next/reference/sql-mapper/entities/introduction/index.html new file mode 100644 index 00000000000..0b50db99467 --- /dev/null +++ b/docs/next/reference/sql-mapper/entities/introduction/index.html @@ -0,0 +1,17 @@ + + + + + +Introduction to Entities | Platformatic Open Source Software + + + + + +
+
Version: Next

Introduction to Entities

The primary goal of Platformatic DB is to read a database schema and generate REST and GraphQL endpoints that enable the execution of CRUD (Create/Retrieve/Update/Delete) operations against the database.

Platformatic DB includes a mapper that reads the schemas of database tables and then generates an entity object for each table.

Platformatic DB is a Fastify application. The Fastify instance object is decorated with the platformatic property, which exposes several APIs that handle the manipulation of data in the database.

Platformatic DB populates the app.platformatic.entities object with data found in database tables.

The keys on the entities object are singularized versions of the table names — for example users becomes user, categories becomes category — and the values are a set of associated metadata and functions.

+ + + + \ No newline at end of file diff --git a/docs/next/reference/sql-mapper/entities/relations/index.html b/docs/next/reference/sql-mapper/entities/relations/index.html new file mode 100644 index 00000000000..d494822ecea --- /dev/null +++ b/docs/next/reference/sql-mapper/entities/relations/index.html @@ -0,0 +1,20 @@ + + + + + +Relations | Platformatic Open Source Software + + + + + +
+
Version: Next

Relations

When Platformatic DB is reading your database schema, it identifies relationships between tables and stores metadata on them in the entity object's relations field. This is achieved by querying the database's internal metadata.

Example

Given this PostgreSQL schema:

CREATE SEQUENCE IF NOT EXISTS categories_id_seq;

CREATE TABLE "categories" (
"id" int4 NOT NULL DEFAULT nextval('categories_id_seq'::regclass),
"name" varchar(255) NOT NULL,
PRIMARY KEY ("id")
);

CREATE SEQUENCE IF NOT EXISTS pages_id_seq;

CREATE TABLE "pages" (
"id" int4 NOT NULL DEFAULT nextval('pages_id_seq'::regclass),
"title" varchar(255) NOT NULL,
"body_content" text,
"category_id" int4,
PRIMARY KEY ("id")
);

ALTER TABLE "pages" ADD FOREIGN KEY ("category_id") REFERENCES "categories"("id");

When this code is run:

'use strict'
const { connect } = require('@platformatic/sql-mapper')
const { pino } = require('pino')
const pretty = require('pino-pretty')
const logger = pino(pretty())

async function main() {
const pgConnectionString = 'postgres://postgres:postgres@127.0.0.1/postgres'
const mapper = await connect({
connectionString: pgConnectionString,
log: logger,
})
const pageEntity = mapper.entities.page
console.log(pageEntity.relations)
await mapper.db.dispose()
}
main()

The output will be:

[
{
constraint_catalog: 'postgres',
constraint_schema: 'public',
constraint_name: 'pages_category_id_fkey',
table_catalog: 'postgres',
table_schema: 'public',
table_name: 'pages',
constraint_type: 'FOREIGN KEY',
is_deferrable: 'NO',
initially_deferred: 'NO',
enforced: 'YES',
column_name: 'category_id',
ordinal_position: 1,
position_in_unique_constraint: 1,
foreign_table_name: 'categories',
foreign_column_name: 'id'
}
]

As Platformatic DB supports multiple database engines, the contents of the relations object will vary depending on the database being used.

The following relations fields are common to all database engines:

  • column_name — the column that stores the foreign key
  • foreign_table_name — the table hosting the related row
  • foreign_column_name — the column in foreign table that identifies the row
+ + + + \ No newline at end of file diff --git a/docs/next/reference/sql-mapper/entities/timestamps/index.html b/docs/next/reference/sql-mapper/entities/timestamps/index.html new file mode 100644 index 00000000000..ca4107f2d37 --- /dev/null +++ b/docs/next/reference/sql-mapper/entities/timestamps/index.html @@ -0,0 +1,17 @@ + + + + + +Timestamps | Platformatic Open Source Software + + + + + +
+
Version: Next

Timestamps

Timestamps can be used to automatically set the created_at and updated_at fields on your entities.

Timestamps are enabled by default

Configuration

To disable timestamps, you need to set the autoTimestamp field to false in configuration file:

{
...
"db": {
"connectionString": "postgres://postgres:postgres@127.0.0.1/postgres",
"autoTimestamp": false
},
...
}

Customizing the field names

By default, the created_at and updated_at fields are used. You can customize the field names by setting the createdAt and updatedAt options in autoTimestamp field in configuration file:

{
...
"db": {
"connectionString": "postgres://postgres:postgres@127.0.0.1/postgres",
"autoTimestamp": {
"createdAt": "inserted_at",
"updatedAt": "updated_at"
}
},
...
}
+ + + + \ No newline at end of file diff --git a/docs/next/reference/sql-mapper/entities/transactions/index.html b/docs/next/reference/sql-mapper/entities/transactions/index.html new file mode 100644 index 00000000000..4bdd53e57f7 --- /dev/null +++ b/docs/next/reference/sql-mapper/entities/transactions/index.html @@ -0,0 +1,18 @@ + + + + + +Transactions | Platformatic Open Source Software + + + + + +
+
Version: Next

Transactions

Platformatic DB entities support transactions through the tx optional parameter. If the tx parameter is provided, the entity will join the transaction, e.g.:


const { connect } = require('@platformatic/sql-mapper')
const logger = pino(pretty())

async function main() {
const pgConnectionString = 'postgres://postgres:postgres@127.0.0.1/postgres'
const { db, entities} = await connect({
connectionString: pgConnectionString,
log: logger,
})

const result = await db.tx(async tx => {
// these two operations will be executed in the same transaction
const authorResult = await entities.author.save({
fields: ['id', 'name'],
input: { name: 'test'},
tx
})
const res = await entities.page.save({
fields: ['title', 'authorId'],
input: { title: 'page title', authorId: authorResult.id },
tx
})
return res
})

}

Throwing an Error triggers a transaction rollback:

    try {
await db.tx(async tx => {
await entities.page.save({
input: { title: 'new page' },
fields: ['title'],
tx
})

// here we have `new page`
const findResult = await entities.page.find({ fields: ['title'], tx })

// (...)

// We force the rollback
throw new Error('rollback')
})
} catch (e) {
// rollback
}

// no 'new page' here...
const afterRollback = await entities.page.find({ fields: ['title'] })

+ + + + \ No newline at end of file diff --git a/docs/next/reference/sql-mapper/fastify-plugin/index.html b/docs/next/reference/sql-mapper/fastify-plugin/index.html new file mode 100644 index 00000000000..ceea7055b90 --- /dev/null +++ b/docs/next/reference/sql-mapper/fastify-plugin/index.html @@ -0,0 +1,17 @@ + + + + + +sql-mapper Fastify Plugin | Platformatic Open Source Software + + + + + +
+
Version: Next

sql-mapper Fastify Plugin

The @platformatic/sql-mapper package exports a Fastify plugin that can be used out of the box in a server application.

A connectionString option must be passed to connect to your database.

The plugin decorates the server with a platformatic object that has the following properties:

  • db — the DB wrapper object provided by @databases
  • sql — the SQL query mapper object provided by @databases
  • entities — all entity objects with their API methods
  • addEntityHooks — a function to add a hook to an entity API method.

The plugin also decorates the Fastify Request object with the following:

  • platformaticContext: an object with the following two properties:
    • app, the Fastify application of the given route
    • reply, the Fastify Reply instance matching that request

Usage

'use strict'

const Fastify = require('fastify')
const mapper = require('@platformatic/sql-mapper')

async function main() {
const app = Fastify({
logger: {
level: 'info'
}
})
app.register(mapper.plugin, {
connectionString: 'postgres://postgres:postgres@127.0.0.1/postgres'
})

app.get('/all-pages', async (req, reply) => {
// Optionally get the platformatic context.
// Passing this to all sql-mapper functions allow to apply
// authorization rules to the database queries (amongst other things).
const ctx = req.platformaticContext

// Will return all rows from 'pages' table
const res = await app.platformatic.entities.page.find({ ctx })
return res
})

await app.listen({ port: 3333 })
}

main()
+ + + + \ No newline at end of file diff --git a/docs/next/reference/sql-mapper/introduction/index.html b/docs/next/reference/sql-mapper/introduction/index.html new file mode 100644 index 00000000000..608aa7b9700 --- /dev/null +++ b/docs/next/reference/sql-mapper/introduction/index.html @@ -0,0 +1,19 @@ + + + + + +Introduction to @platformatic/sql-mapper | Platformatic Open Source Software + + + + + +
+
Version: Next

Introduction to @platformatic/sql-mapper

@platformatic/sql-mapper is the underlying utility that Platformatic DB uses to create useful utilities to manipulate your SQL database using JavaScript.

This module is bundled with Platformatic DB via a Fastify plugin. The rest of this guide shows how to use this module directly.

Install

npm i @platformatic/sql-mapper

API

connect(opts) : Promise

It will inspect a database schema and return an object containing:

  • db — A database abstraction layer from @databases
  • sql — The SQL builder from @databases
  • entities — An object containing a key for each table found in the schema, with basic CRUD operations. See Entity Reference for details.

The valid options are:

  • connectionString — The Database connection string
  • poolSize - Maximum number of connections in the connection pool. Defaults to 10.
  • log — A logger object (like Pino)
  • onDatabaseLoad — An async function that is called after the connection is established. It will receive db and sql as parameters.
  • ignore — Object used to ignore some tables from building entities. (i.e. { 'versions': true } will ignore versions table)
  • autoTimestamp — Generate timestamp automatically when inserting/updating records.
  • hooks — For each entity name (like Page) you can customize any of the entity API function. Your custom function will receive the original function as first parameter, and then all the other parameters passed to it.

createConnectionPool(opts) : Promise

It will create a connection pool to the database, without inspecting the schema or generating any entities.

The valid options are:

  • connectionString — The Database connection string
  • poolSize - Maximum number of connections in the connection pool. Defaults to 10.
  • log — A logger object (like Pino)

This utility is useful if you just need to connect to the db without generating any entity.

Code samples

const { connect } = require('@platformatic/sql-mapper')
const { pino } = require('pino')

const logger = pino()

async function onDatabaseLoad (db, sql) {
await db.query(sql`CREATE TABLE pages (
id SERIAL PRIMARY KEY,
title VARCHAR(255) NOT NULL
);`)
}
const connectionString =
'postgres://postgres:postgres@localhost:5432/postgres'
const mapper = await connect({
connectionString,
log: logger,
onDatabaseLoad,
ignore: {},
hooks: {
Page: {
find: async function(_find, opts) {
console.log('hook called');
return await _find(opts)
}
}
}
})
const pageEntity = mapper.entities.page

await mapper.db.query(mapper.sql`SELECT * FROM pages`)
await mapper.db.find('option1', 'option2')
+ + + + \ No newline at end of file diff --git a/docs/next/reference/sql-openapi/api/index.html b/docs/next/reference/sql-openapi/api/index.html new file mode 100644 index 00000000000..6023bd3674a --- /dev/null +++ b/docs/next/reference/sql-openapi/api/index.html @@ -0,0 +1,22 @@ + + + + + +API | Platformatic Open Source Software + + + + + +
+
Version: Next

API

Each table is mapped to an entity named after the table's name.

In the following reference we'll use some placeholders, but let's see an example

Example

Given this SQL executed against your database:

CREATE TABLE pages (
id SERIAL PRIMARY KEY,
title VARCHAR(255) NOT NULL,
body TEXT NOT NULL
);
  • [PLURAL_ENTITY_NAME] is pages
  • [SINGULAR_ENTITY_NAME] is page
  • [PRIMARY_KEY] is id
  • fields are id, title, body

GET and POST parameters

Some APIs need the GET method, where parameters must be defined in the URL, or POST/PUT methods, where parameters can be defined in the http request payload.

Fields

Every API can define a fields parameter, representing the entity fields you want to get back for each row of the table. If not specified all fields are returned.

The fields parameter is always sent in the query string, even for POST, PUT and DELETE requests, as a comma separated value.

GET /[PLURAL_ENTITY_NAME]

Return all entities matching where clause

Where clause

You can define many WHERE clauses in REST API, each clause includes a field, an operator and a value.

The field is one of the fields found in the schema.

The operator follows this table:

Platformatic operatorSQL operator
eq'='
in'IN'
nin'NOT IN'
neq'<>'
gt'>'
gte'>='
lt'<'
lte'<='

The value is the value you want to compare the field to.

For GET requests all these clauses are specified in the query string using the format where.[FIELD].[OPERATOR]=[VALUE]

Example

If you want to get the title and the body of every page where id < 15 you can make an HTTP request like this:

$ curl -X 'GET' \
'http://localhost:3042/pages/?fields=body,title&where.id.lt=15' \
-H 'accept: application/json'

Where clause operations are by default combined with the AND operator. To create an OR condition use the where.or query param.

Each where.or query param can contain multiple conditions separated by a | (pipe).

The where.or conditions are similar to the where conditions, except that they don't have the where prefix.

Example

If you want to get the posts where counter = 10 OR counter > 30 you can make an HTTP request like this:

$ curl -X 'GET' \
'http://localhost:3042/pages/?where.or=(counter.eq=10|counter.gte=30)' \
-H 'accept: application/json'

OrderBy clause

You can define the ordering of the returned rows within your REST API calls with the orderby clause using the following pattern:

?orderby.[field]=[asc | desc]

The field is one of the fields found in the schema. +The value can be asc or desc.

Example

If you want to get the pages ordered alphabetically by their titles you can make an HTTP request like this:

$ curl -X 'GET' \
'http://localhost:3042/pages?orderby.title=asc' \
-H 'accept: application/json'

Total Count

If totalCount boolean is true in query, the GET returns the total number of elements in the X-Total-Count header ignoring limit and offset (if specified).

$ curl -v -X 'GET' \
'http://localhost:3042/pages/?limit=2&offset=0&totalCount=true' \
-H 'accept: application/json'

(...)
> HTTP/1.1 200 OK
> x-total-count: 18
(...)

[{"id":1,"title":"Movie1"},{"id":2,"title":"Movie2"}]%

POST [PLURAL_ENTITY_NAME]

Creates a new row in table. Expects fields to be sent in a JSON formatted request body.

Example

$ curl -X 'POST' \
'http://localhost:3042/pages/' \
-H 'accept: application/json' \
-H 'Content-Type: application/json' \
-d '{
"title": "Hello World",
"body": "Welcome to Platformatic!"
}'

{
"id": 1,
"title": "Hello World",
"body": "Welcome to Platformatic"
}

GET [PLURAL_ENTITY_NAME]/[PRIMARY_KEY]

Returns a single row, identified by PRIMARY_KEY.

Example

$ curl -X 'GET' 'http://localhost:3042/pages/1?fields=title,body'

{
"title": "Hello World",
"body": "Welcome to Platformatic"
}

POST [PLURAL_ENTITY_NAME]/[PRIMARY_KEY]

Updates a row identified by PRIMARY_KEY.

Example

$ curl -X 'POST' \
'http://localhost:3042/pages/1' \
-H 'accept: application/json' \
-H 'Content-Type: application/json' \
-d '{
"title": "Hello Platformatic!",
"body": "Welcome to Platformatic!"
}'

{
"id": 1,
"title": "Hello Platformatic!",
"body": "Welcome to Platformatic"
}

PUT [PLURAL_ENTITY_NAME]/[PRIMARY_KEY]

Same as POST [PLURAL_ENTITY_NAME]/[PRIMARY_KEY].

PUT [PLURAL_ENTITY_NAME]

Updates all entities matching where clause

Example

$ curl -X 'PUT' \
'http://localhost:3042/pages?where.id.in=1,2' \
-H 'accept: application/json' \
-H 'Content-Type: application/json' \
-d '{
"title": "Updated title!",
"body": "Updated body!"
}'

[{
"id": 1,
"title": "Updated title!",
"body": "Updated body!"
},{
"id": 2,
"title": "Updated title!",
"body": "Updated body!"
}]

DELETE [PLURAL_ENTITY_NAME]/[PRIMARY_KEY]

Deletes a row identified by the PRIMARY_KEY.

Example

$ curl -X 'DELETE' 'http://localhost:3042/pages/1?fields=title'

{
"title": "Hello Platformatic!"
}

Nested Relationships

Let's consider the following SQL:

CREATE TABLE IF NOT EXISTS movies (
movie_id INTEGER PRIMARY KEY,
title TEXT NOT NULL
);
CREATE TABLE IF NOT EXISTS quotes (
id INTEGER PRIMARY KEY,
quote TEXT NOT NULL,
movie_id INTEGER NOT NULL REFERENCES movies(movie_id)
);

And:

  • [P_PARENT_ENTITY] is movies
  • [S_PARENT_ENTITY] is movie
  • [P_CHILDREN_ENTITY] is quotes
  • [S_CHILDREN_ENTITY] is quote

In this case, more APIs are available:

GET [P_PARENT_ENTITY]/[PARENT_PRIMARY_KEY]/[P_CHILDREN_ENTITY]

Given a 1-to-many relationship, where a parent entity can have many children, you can query for the children directly.

$ curl -X 'GET' 'http://localhost:3042/movies/1/quotes?fields=quote'

[
{
"quote": "I'll be back"
},
{
"quote": "Hasta la vista, baby"
}
]

GET [P_CHILDREN_ENTITY]/[CHILDREN_PRIMARY_KEY]/[S_PARENT_ENTITY]

You can query for the parent directly, e.g.:

$ curl -X 'GET' 'http://localhost:3042/quotes/1/movie?fields=title'

{
"title": "Terminator"
}

Many-to-Many Relationships

Many-to-Many relationship lets you relate each row in one table to many rows in +another table and vice versa.

Many-to-many relationships are implemented in SQL via a "join table", a table whose primary key is composed of the identifiers of the two parts of the many-to-many relationship.

Platformatic DB fully supports many-to-many relationships on all supported databases.

Let's consider the following SQL:

CREATE TABLE pages (
id INTEGER PRIMARY KEY,
the_title VARCHAR(42)
);

CREATE TABLE users (
id INTEGER PRIMARY KEY,
username VARCHAR(255) NOT NULL
);

CREATE TABLE editors (
page_id INTEGER NOT NULL,
user_id INTEGER NOT NULL,
role VARCHAR(255) NOT NULL,
CONSTRAINT fk_editor_pages FOREIGN KEY (page_id) REFERENCES pages(id),
CONSTRAINT fk_editor_users FOREIGN KEY (user_id) REFERENCES users(id),
PRIMARY KEY (page_id, user_id)
);

And:

  • [P_ENTITY] is editors
  • [P_REL_1] is pages
  • [S_REL_1] is page
  • [KEY_REL_1] is pages PRIMARY KEY: pages(id)
  • [P_REL_2] is users
  • [S_REL_2] is user
  • [KEY_REL_2] is users PRIMARY KEY: users(id)

In this case, here are the APIs that are available for the join table:

GET [P_ENTITY]/[S_REL_1]/[KEY_REL_1]/[S_REL_2]/[KEY_REL_2]

This returns the entity in the "join table", e.g. GET /editors/page/1/user/1.

POST [P_ENTITY]/[S_REL_1]/[KEY_REL_1]/[S_REL_2]/[KEY_REL_2]

Creates a new entity in the "join table", e.g. POST /editors/page/1/user/1.

PUT [P_ENTITY]/[S_REL_1]/[KEY_REL_1]/[S_REL_2]/[KEY_REL_2]

Updates an entity in the "join table", e.g. PUT /editors/page/1/user/1.

DELETE [P_ENTITY]/[S_REL_1]/[KEY_REL_1]/[S_REL_2]/[KEY_REL_2]

Delete the entity in the "join table", e.g. DELETE /editors/page/1/user/1.

GET /[P_ENTITY]

See the above.

Offset only accepts values >= 0. Otherwise an error is returned.

Pagination

Platformatic DB supports pagination of results through the input parameters limit and offset.

Example

$ curl -X 'GET' 'http://localhost:3042/movies?limit=5&offset=10'

[
{
"title": "Star Wars",
"movie_id": 10
},
...
{
"title": "007",
"movie_id": 14
}
]

It returns 5 movies starting from position 10.

TotalCount functionality can be used in order to evaluate if there are more pages.

Limit

By default a limit value (10) is applied to each request.

Clients can override this behavior by passing a value. In this case the server validates the input and an error is returned if it exceeds the max accepted value (100).

Limit's values can be customized through configuration:

{
...
"db": {
...
"limit": {
"default": 50,
"max": 1000
}
}
}

Limit only accepts values >= 0. Otherwise an error is returned.

Offset

By default, offset is not applied to the request. Clients can override this behavior by passing a value.

Offset only accepts values >= 0. Otherwise an error is returned.

+ + + + \ No newline at end of file diff --git a/docs/next/reference/sql-openapi/ignore/index.html b/docs/next/reference/sql-openapi/ignore/index.html new file mode 100644 index 00000000000..a7ecd350606 --- /dev/null +++ b/docs/next/reference/sql-openapi/ignore/index.html @@ -0,0 +1,17 @@ + + + + + +Ignoring entities and fields | Platformatic Open Source Software + + + + + + + + + + \ No newline at end of file diff --git a/docs/next/reference/sql-openapi/introduction/index.html b/docs/next/reference/sql-openapi/introduction/index.html new file mode 100644 index 00000000000..2be904b42a9 --- /dev/null +++ b/docs/next/reference/sql-openapi/introduction/index.html @@ -0,0 +1,17 @@ + + + + + +Introduction to the REST API | Platformatic Open Source Software + + + + + +
+
Version: Next

Introduction to the REST API

The Platformatic DB OpenAPI plugin automatically starts a REST API server (powered by Fastify) that provides CRUD (Create, Read, Update, Delete) functionality for each entity.

Configuration

In the config file, under the "db" section, the OpenAPI server is enabled by default. You can disable it by setting the property openapi to false.

Example

{
...
"db": {
"openapi": false
}
}

As Platformatic DB uses fastify-swagger under the hood, the "openapi" property can be an object that follows the OpenAPI Specification Object format.

This allows you to extend the output of the Swagger UI documentation.

+ + + + \ No newline at end of file diff --git a/docs/platformatic-cloud/deploy-database-neon/index.html b/docs/platformatic-cloud/deploy-database-neon/index.html new file mode 100644 index 00000000000..07afae01706 --- /dev/null +++ b/docs/platformatic-cloud/deploy-database-neon/index.html @@ -0,0 +1,32 @@ + + + + + +Deploy a PostgreSQL database with Neon | Platformatic Open Source Software + + + + + +
+
Version: 0.42.1

Deploy a PostgreSQL database with Neon

Neon offers multi-cloud fully managed +Postgres with a generous free tier. They separated storage and +compute to offer autoscaling, branching, and bottomless storage. +It offers a great environment for creating database preview +environments for your Platformatic DB +applications.

This guide shows you how to integrate Neon branch deployments with your +Platformatic app's GitHub Actions workflows. It assumes you have already +followed the Quick Start Guide.

Create a project on Neon

To set up an account with Neon, open their website, sign up and create a +new project.

Take note of the following configuration setting values:

  • The connection string for your main branch database, to be stored in a NEON_DB_URL_PRODUCTION secret
  • The Project ID (available under the project Settings), to be stored in a NEON_PROJECT_ID secret
  • Your API key (available by clicking on your user icon > Account > Developer settings), to be stored under NEON_API_KEY

You can learn more about Neon API keys in their Manage API Keys documentation.

Configure Github Environments and Secrets

Now you need to set the configuration values listed above as +repository secrets +on your project's GitHub repository. +Learn how to use environments for deployment in GitHub's documentation.

Configure the GitHub Environments for your repository to have:

  • production secrets, available only to the main branch:
    • NEON_DB_URL_PRODUCTION
  • previews secrets available to all branches:
    • NEON_PROJECT_ID
    • NEON_API_KEY

Configure the main branch workflow

Replace the contents of your app's workflow for static workspace deployment:

.github/workflows/platformatic-static-workspace-deploy.yml
name: Deploy Platformatic application to the cloud
on:
push:
branches:
- main
paths-ignore:
- 'docs/**'
- '**.md'

jobs:
build_and_deploy:
environment:
name: production
permissions:
contents: read
runs-on: ubuntu-latest
steps:
- name: Checkout application project repository
uses: actions/checkout@v4
- name: npm install --omit=dev
run: npm install --omit=dev
- name: Deploy project
uses: platformatic/onestep@latest
with:
github_token: ${{ secrets.GITHUB_TOKEN }}
platformatic_workspace_id: <YOUR_STATIC_WORKSPACE_ID>
platformatic_workspace_key: ${{ secrets.PLATFORMATIC_STATIC_WORKSPACE_API_KEY }}
platformatic_config_path: ./platformatic.db.json
secrets: DATABASE_URL
env:
DATABASE_URL: ${{ secrets.NEON_DB_URL_PRODUCTION }}
PLT_SERVER_LOGGER_LEVEL: info
PORT: 3042
PLT_SERVER_HOSTNAME: 127.0.0.1

Replace <YOUR_STATIC_WORKSPACE_ID> with the workspace ID that you previously had in this file.

When your app is deployed to the static workspace it will now be configured to connect to the +main branch database for your Neon project.

Configure the preview environment workflow

Neon allows up to 10 database branches on their free tier. You can automatically create a new +database branch when a pull request is opened, and then automatically remove it when the pull +request is merged.

GitHub Action to create a preview environment

Replace the contents of your app's workflow for dynamic workspace deployment:

.github/workflows/platformatic-dynamic-workspace-deploy.yml
name: Deploy to Platformatic cloud
on:
pull_request:
paths-ignore:
- 'docs/**'
- '**.md'

# This allows a subsequently queued workflow run to interrupt previous runs
concurrency:
group: "${{ github.workflow }} @ ${{ github.event.pull_request.head.label || github.head_ref || github.ref }}"
cancel-in-progress: true

jobs:
build_and_deploy:
runs-on: ubuntu-latest
environment:
name: development
steps:
- name: Checkout application project repository
uses: actions/checkout@v4
- name: npm install --omit=dev
run: npm install --omit=dev
- name: Get PR number
id: get_pull_number
run: |
pull_number=$(jq --raw-output .pull_request.number "$GITHUB_EVENT_PATH")
echo "pull_number=${pull_number}" >> $GITHUB_OUTPUT
echo $pull_number
- uses: neondatabase/create-branch-action@v4
with:
project_id: ${{ secrets.NEON_PROJECT_ID }}
branch_name: pr-${{ steps.get_pull_number.outputs.pull_number }}
api_key: ${{ secrets.NEON_API_KEY }}
id: create-branch
- name: Deploy project
uses: platformatic/onestep@latest
with:
github_token: ${{ secrets.GITHUB_TOKEN }}
platformatic_workspace_id: ${{ secrets.PLATFORMATIC_DYNAMIC_WORKSPACE_ID }}
platformatic_workspace_key: ${{ secrets.PLATFORMATIC_DYNAMIC_WORKSPACE_KEY }}
platformatic_config_path: ./platformatic.db.json
env:
DATABASE_URL: ${{ steps.create-branch.outputs.db_url }}
PLT_SERVER_LOGGER_LEVEL: info
PORT: 3042
PLT_SERVER_HOSTNAME: 127.0.0.1

Replace <YOUR_DYNAMIC_WORKSPACE_ID> with the workspace ID that you previously had in this file.

Configure preview environment cleanup

After a pull request to the main branch is merged, you should remove the matching database branch.

Create a new file, .github/workflows/cleanup-neon-branch-db.yml, and copy and paste in the following +workflow configuration:

.github/workflows/cleanup-neon-branch-db.yml
name: Cleanup Neon Database Branch
on:
push:
branches:
- 'main'
jobs:
delete-branch:
environment:
name: development
permissions: write-all
runs-on: ubuntu-latest
steps:
- name: Get PR info
id: get-pr-info
uses: actions-ecosystem/action-get-merged-pull-request@v1.0.1
with:
github_token: ${{secrets.GITHUB_TOKEN}}
- run: |
echo ${{ steps.get-pr-info.outputs.number}}
- name: Delete Neon Branch
if: ${{ steps.get-pr-info.outputs.number }}
uses: neondatabase/delete-branch-action@v3
with:
project_id: ${{ secrets.NEON_PROJECT_ID }}
branch: pr-${{ steps.get-pr-info.outputs.number }}
api_key: ${{ secrets.NEON_API_KEY }}

Deployment

To deploy these changes to your app:

  1. Create a Git branch locally (git checkout -b <BRANCH_NAME>)
  2. Commit your changes and push them to GitHub
  3. Open a pull request on GitHub - a branch will automatically be created for your Neon database and a preview app will be deployed to Platformatic Cloud (in your app's dynamic workspace).
  4. Merge the pull request - the Neon database branch will be automatically deleted and your app will be deployed to Platformatic Cloud (in your app's static workspace).
+ + + + \ No newline at end of file diff --git a/docs/platformatic-cloud/pricing/index.html b/docs/platformatic-cloud/pricing/index.html new file mode 100644 index 00000000000..0761ce4d0c7 --- /dev/null +++ b/docs/platformatic-cloud/pricing/index.html @@ -0,0 +1,23 @@ + + + + + +Platformatic Cloud Pricing | Platformatic Open Source Software + + + + + +
+
Version: 0.42.1

Platformatic Cloud Pricing

Find the plan that works best for you!

FreeBasicAdvancedPro
Pricing$0$4.99$22.45$49.99
Slots01512
CNAME-truetruetrue
Always On-truetruetrue

FAQ

What is a slot?

One slot is equal to one compute unit. The free plan has no always-on machines and they will be stopped while not in use.

What is a workspace?

A workspace is the security boundary of your deployment. You will use +the same credentials to deploy to one.

A workspace can be either static or dynamic. A static workspace always deploys to the same domain, while in a dynamic workspace each deployment will have its own domain. The latter is useful to provide pull request previews.

Can I change or upgrade my plan after I start using Platformatic?

Plans can be changed or upgraded at any time

What does it mean I can set my own CNAME?

Free applications only get a *.deploy.space domain name to access their application. All other plans can set it to a domain of their choosing.

+ + + + \ No newline at end of file diff --git a/docs/platformatic-cloud/quick-start-guide/index.html b/docs/platformatic-cloud/quick-start-guide/index.html new file mode 100644 index 00000000000..8f433b569e1 --- /dev/null +++ b/docs/platformatic-cloud/quick-start-guide/index.html @@ -0,0 +1,45 @@ + + + + + +Cloud Quick Start Guide | Platformatic Open Source Software + + + + + +
+
Version: 0.42.1

Cloud Quick Start Guide

This guide shows you how to create and deploy an application to +Platformatic Cloud.

Prerequisites

To follow along with this guide you'll need to have these things installed:

You will also need to have a GitHub account.

Log in to Platformatic Cloud

Go to the Platformatic Cloud website and click on the +Continue with GitHub button. You'll be transferred to a GitHub page that +asks you to Authorize Platformatic Cloud. To continue, click on the +Authorize platformatic button.

Screenshot of Continue with GitHub button

On the Platformatic Cloud Service Agreements page, check the boxes and +click the Continue button. You'll then be redirected to your Cloud Dashboard page.

Create a Cloud app

Screenshot of an empty Apps page

Click the Create an app now button on your Cloud Dashboard page.

Enter quick-start-app as your application name. Click the Create Application button.

Create a static app workspace

Enter production as the name for your workspace. Then click on the Create Workspace button.

On the next page you'll see the Workspace ID and API key for your app workspace. +Copy them and store them somewhere secure for future reference, for example in a password manager app. +The API key will be used to deploy your app to the workspace that you've just created.

Click on the Back to dashboard button.

Create a dynamic app workspace

On your Cloud Dashboard, click on your app, then click on Create Workspace in the Workspaces +sidebar.

Screenshot of the create app workspace screen

The Dynamic Workspace option will be automatically enabled as you have already created a +static workspace. Dynamic workspaces can be used to deploy preview applications for GitHub +pull requests.

Enter development as the name for your workspace, then click on the Create Workspace button. +Copy the Workspace ID and API key and store them somewhere secure.

Create a GitHub repository

Go to the Create a new repository page on GitHub. +Enter quick-start-app as the Repository name for your new repository. +Click on the Add a README file checkbox and click the Create repository +button.

Add the workspace API keys as repository secrets

Go to the Settings tab on your app's GitHub repository. Click into the +Secrets and variables > Actions section and add the following secrets:

NameSecret
PLATFORMATIC_STATIC_WORKSPACE_IDYour app's static workspace ID
PLATFORMATIC_STATIC_WORKSPACE_API_KEYYour app's static workspace API key
PLATFORMATIC_DYNAMIC_WORKSPACE_IDYour app's dynamic workspace ID
PLATFORMATIC_DYNAMIC_WORKSPACE_API_KEYYour app's dynamic workspace API key

Click on the New repository secret button to add a secret.

tip

You can also use the GitHub CLI to set secrets on your GitHub repository, for example:

gh secret set \
--app actions \
--env-file <FILENAME_OF_ENV_FILE_WITH_SECRETS> \
--repos <YOUR_GITHUB_USERNAME>/<REPO_NAME>

Create a new Platformatic app

In your terminal, use Git to clone your repository from GitHub. For example:

git clone git@github.com:username/quick-start-app.git
tip

See the GitHub documentation for help with +Cloning a repository.

Now change into the project directory:

cd quick-start-app

Now run this command to start the Platformatic creator wizard:

npm create platformatic@latest

This interactive command-line tool will ask you some questions about how you'd +like to set up your new Platformatic app. For this guide, select these options:

- Which kind of project do you want to create?     => DB
- Where would you like to create your project? => .
- Do you want to create default migrations? => yes
- Do you want to create a plugin? => yes
- Do you want to use TypeScript? => no
- Do you want to overwrite the existing README.md? => yes
- Do you want to run npm install? => yes (this can take a while)
- Do you want to apply the migrations? => yes
- Do you want to generate types? => yes
- Do you want to create the github action to deploy this application to Platformatic Cloud dynamic workspace? => yes
- Do you want to create the github action to deploy this application to Platformatic Cloud static workspace? => yes

Copy and paste your dynamic and static workspace IDs when prompted by the creator wizard.

Once the wizard is complete, you'll have a Platformatic app project in the +quick-start-app directory, with example migration files and a plugin script.

Deploy the app

In your project directory, commit your application with Git:

git add .

git commit -m "Add Platformatic app"

Now push your changes up to GitHub:

git push origin main

On the GitHub repository page in your browser click on the Actions tab. +You should now see the Platformatic Cloud deployment workflow running.

Test the deployed app

Screenshot of a static app workspace that has had an app deployed to it

Once the GitHub Actions deployment workflow has completed, go to the production workspace +for your app in Platformatic Cloud. Click on the link for the Entry Point. You should now +see the Platformatic DB app home page.

Click on the OpenAPI Documentation link to try out your app's REST API using the Swagger UI.

Screenshot of Swagger UI for a Platformatic DB app

Preview pull request changes

When a pull request is opened on your project's GitHub repository, a preview app will automatically +be deployed to your app's dynamic workspace.

To see a preview app in action, create a new Git branch:

git checkout -b add-hello-endpoint

Then open up your app's plugin.js file in your code editor. Add the following code inside +the existing empty function:

app.get('/hello', async function(request, reply) {
return { hello: 'from Platformatic Cloud' }
})

Save the changes, then commit and push them up to GitHub:

git add plugin.js

git commit -m "Add hello endpoint"

git push -u origin add-hello-endpoint

Now create a pull request for your changes on GitHub. At the bottom of the +pull request page you'll see that a deployment has been triggered to your +app's dynamic workspace.

Screenshot of checks on a GitHub pull request

Once the deployment has completed, a comment will appear on your pull request +with a link to the preview app.

Screenshot of a deployed preview app comment on a GitHub pull request

Click on the Application URL link. If you add /hello on to the URL, +you should receive a response from the endpoint that you just added to +your application.

Screenshot of a JSON response from an API endpoint

+ + + + \ No newline at end of file diff --git a/docs/reference/cli/index.html b/docs/reference/cli/index.html new file mode 100644 index 00000000000..b7327738d75 --- /dev/null +++ b/docs/reference/cli/index.html @@ -0,0 +1,44 @@ + + + + + +Platformatic CLI | Platformatic Open Source Software + + + + + +
+
Version: 0.42.1

Platformatic CLI

Installation and usage

Install the Platformatic CLI as a dependency for your project:

npm install platformatic

Once it's installed you can run it with:

npx platformatic
info

The platformatic package can be installed globally, but installing it as a +project dependency ensures that everyone working on the project is using the +same version of the Platformatic CLI.

Commands

The Platformatic CLI provides the following commands:

help

Welcome to Platformatic. Available commands are:

  • help - display this message.
  • help <command> - show more information about a command.
  • db - start Platformatic DB; type platformatic db help to know more.
  • service - start Platformatic Service; type platformatic service help to know more.
  • upgrade - upgrade the Platformatic configuration to the latest version.
  • gh - create a new gh action for Platformatic deployments.
  • deploy - deploy a Platformatic application to the cloud.
  • runtime - start Platformatic Runtime; type platformatic runtime help to know more.
  • start - start a Platformatic application.
  • frontend - create frontend code to consume the REST APIs.

compile

Compile all typescript plugins.

  $ platformatic compile

This command will compile the TypeScript plugins for each platformatic application.

deploy

Deploys an application to the Platformatic Cloud.

 $ platformatic deploy

Options:

  • -t, --type static/dynamic - The type of the workspace.
  • -c, --config FILE - Specify a configuration file to use.
  • -k, --keys FILE - Specify a path to the workspace keys file.
  • -l --label TEXT - The deploy label. Only for dynamic workspaces.
  • -e --env FILE - The environment file to use. Default: ".env"
  • -s --secrets FILE - The secrets file to use. Default: ".secrets.env"
  • --workspace-id uuid - The workspace id where the application will be deployed.
  • --workspace-key TEXT - The workspace key where the application will be deployed.
  1. To deploy a Platformatic application to the cloud, you should go to the Platformatic cloud dashboard and create a workspace.
  2. Once you have created a workspace, retrieve your workspace id and key from the workspace settings page. Optionally, you can download the provided workspace env file, which you can use with the --keys option.

ℹ️

When deploying an application to a dynamic workspace, specify the deploy --label option. You can find it on your cloud dashboard or you can specify a new one.

gh

Creates a gh action to deploy platformatic services on workspaces.

 $ platformatic gh -t dynamic

Options:

  • -w --workspace ID - The workspace ID where the service will be deployed.
  • -t, --type static/dynamic - The type of the workspace. Defaults to static.
  • -c, --config FILE - Specify a configuration file to use.
  • -b, --build - Build the service before deploying (npm run build).

If not specified, the configuration will be loaded from any of the following, in the current directory.

  • platformatic.db.json, or
  • platformatic.db.yml, or
  • platformatic.db.tml, or
  • platformatic.service.json, or
  • platformatic.service.yml, or
  • platformatic.service.tml

You can find more details about the configuration format here:

start

Start a Platformatic application with the following command:

$ platformatic start

Options:

  • -c, --config <path> - Path to the configuration file.
  • --inspect[=[host:]port] - Start the Node.js debugger. host defaults to '127.0.0.1'. port defaults to 9229. Use caution when binding to a public host:port combination.
  • --inspect-brk[=[host:]port] - Start the Node.js debugger and block until a client has attached. host defaults to '127.0.0.1'. port defaults to 9229. Use caution when binding to a public host:port combination.

upgrade

Upgrade the Platformatic schema configuration to the latest version.

 $ platformatic upgrade

Options:

  • -c, --config FILE - Specify a schema configuration file to use.

If not specified, the configuration will be loaded from any of the following, in the current directory.

  • platformatic.db.json, or
  • platformatic.db.yml, or
  • platformatic.db.tml, or
  • platformatic.service.json, or
  • platformatic.service.yml, or
  • platformatic.service.tml

You can find more details about the configuration format here:

client

platformatic client <command>

help

Create a Fastify plugin that exposes a client for a remote OpenAPI or GraphQL API.

To create a client for a remote OpenAPI API, you can use the following command:

$ platformatic client http://example.com/to/schema/file -n myclient

To create a client for a remote Graphql API, you can use the following command:

$ platformatic client http://example.com/graphql -n myclient

Instead of a URL, you can also use a local file:

$ platformatic client path/to/schema -n myclient

This will create a Fastify plugin that exposes a client for the remote API in a folder myclient +and a file named myclient.js inside it.

If platformatic config file is specified, it will be edited and a clients section will be added. +Then, in any part of your Platformatic application you can use the client.

You can use the client in your application in Javascript, calling a GraphQL endpoint:

module.exports = async function (app, opts) {
app.post('/', async (request, reply) => {
const res = await app.myclient.graphql({
query: 'query { hello }'
})
return res
})
}

or in Typescript, calling an OpenAPI endpoint:

import { FastifyInstance } from 'fastify'
/// <reference path="./myclient" />

export default async function (app: FastifyInstance) {
app.get('/', async () => {
return app.myclient.get({})
})
}

Options:

  • -c, --config <path> - Path to the configuration file.
  • -n, --name <name> - Name of the client.
  • -f, --folder <name> - Name of the plugin folder, defaults to --name value.
  • -t, --typescript - Generate the client plugin in TypeScript.
  • --full-response - Client will return full response object rather than just the body.
  • --full-request - Client will be called with all parameters wrapped in body, headers and query properties.
  • --full - Enables both --full-request and --full-response overriding them.
  • --optional-headers <headers> - Comma separated string of headers that will be marked as optional in the type file
  • --validate-response - If set, will validate the response body against the schema.

composer

platformatic composer <command>

help

Available commands:

  • help - show this help message.
  • help <command> - shows more information about a command.
  • start - start the server.
  • openapi schemas fetch - fetch OpenAPI schemas from services.

openapi schemas fetch

Fetch OpenAPI schemas from remote services to use in your Platformatic project.

  $ platformatic composer openapi schemas fetch

It will fetch all the schemas from the remote services and store them by path +set in the platformatic.composer.json file. If the path is not set, it will +skip fetching the schema.

start

Start the Platformatic Composer server with the following command:

 $ platformatic composer start

You will need a configuration file. Here is an example to get you started, +save the following as platformatic.composer.json:

  {
"server": {
"hostname": "127.0.0.1",
"port": 0,
"logger": {
"level": "info"
}
},
"composer": {
"services": [
{
"id": "service1",
"origin": "http://127.0.0.1:3051",
"openapi": {
"url": "/documentation/json"
}
},
{
"id": "service2",
"origin": "http://127.0.0.1:3052",
"openapi": {
"file": "./schemas/service2.openapi.json"
}
}
],
"refreshTimeout": 1000
}
}

By sending the SIGUSR2 signal, the server can be reloaded.

Options:

  • -c, --config FILE - Specify a configuration file to use.

If not specified, the configuration will be loaded from any of the following, in the current directory.

  • platformatic.composer.json, or
  • platformatic.composer.yml, or
  • platformatic.composer.tml

You can find more details about the configuration format here:

db

platformatic db <command>

compile

Compile typescript plugins.

  $ platformatic db compile

As a result of executing this command, the Platformatic DB will compile typescript +plugins in the outDir directory.

If not specified, the configuration will be loaded from any of the following, in the current directory.

  • platformatic.db.json, or
  • platformatic.db.yml, or
  • platformatic.db.tml

You can find more details about the configuration format here:

help

Available commands:

  • help - show this help message.
  • help <command> - shows more information about a command.
  • start - start the server.
  • compile - compile typescript plugins.
  • seed - run a seed file.
  • types - generate typescript types for entities.
  • schema - generate and print api schema.
  • migrations create - generate do and undo migration files.
  • migrations apply - apply migration files.

migrations apply

Apply all configured migrations to the database:

  $ platformatic db migrations apply

The migrations will be applied in the order they are specified in the +folder defined in the configuration file. If you want to apply a specific migration, +you can use the --to option:

  $ platformatic db migrations apply --to 001

Here is an example migration:

  CREATE TABLE graphs (
id SERIAL PRIMARY KEY,
name TEXT
);

You can always rollback to a specific migration with:

  $ platformatic db migrations apply --to VERSION

Use 000 to reset to the initial state.

Options:

  • -c, --config <path> - Path to the configuration file.
  • -t, --to <version> - Migrate to a specific version.

If not specified, the configuration will be loaded from any of the following, in the current directory.

  • platformatic.db.json, or
  • platformatic.db.yml, or
  • platformatic.db.tml

You can find more details about the configuration format here:

migrations create

Create next migration files.

  $ platformatic db migrations create

It will generate do and undo sql files in the migrations folder. The name of the +files will be the next migration number.

  $ platformatic db migrations create --name "create_users_table"

Options:

  • -c, --config <path> - Path to the configuration file.

If not specified, the configuration will be loaded from any of the following, in the current directory.

  • platformatic.db.json, or
  • platformatic.db.yml, or
  • platformatic.db.tml

You can find more details about the configuration format here:

migrations

Available commands:

  • migrations create - generate do and undo migration files.
  • migrations apply - apply migration files.

schema

Update the config schema file:

  • schema config - update the JSON schema config available on platformatic.db.schema.json

Your configuration on platformatic.db.json has a schema defined to improve the developer experience and avoid mistakes when updating the configuration of Platformatic DB. +When you run platformatic db init, a new JSON $schema property is added in platformatic.db.schema.json. This can allow your IDE to add suggestions (f.e. mandatory/missing fields, types, default values) by opening the config in platformatic.db.json. +Running platformatic db schema config you can update your schema so that it matches well the latest changes available on your config.

Generate a schema from the database and prints it to standard output:

  • schema graphql - generate the GraphQL schema
  • schema openapi - generate the OpenAPI schema

Options:

  • -c, --config FILE - Specify a configuration file to use.

If not specified, the configuration will be loaded from any of the following, in the current directory.

  • platformatic.db.json, or
  • platformatic.db.yml, or
  • platformatic.db.tml

You can find more details about the configuration format here:

seed

Load a seed into the database. This is a convenience method that loads a JavaScript file and configures @platformatic/sql-mapper to connect to the database specified in the configuration file.

Here is an example of a seed file:

  'use strict'

module.exports = async function ({ entities, db, sql }) {
await entities.graph.save({ input: { name: 'Hello' } })
await db.query(sql`
INSERT INTO graphs (name) VALUES ('Hello 2');
`)
}

You can run this using the seed command:

  $ platformatic db seed seed.js

Options:

  • --config - Path to the configuration file.

If not specified, the configuration will be loaded from any of the following, in the current directory.

  • platformatic.db.json, or
  • platformatic.db.yml, or
  • platformatic.db.tml

You can find more details about the configuration format here:

start

Start the Platformatic DB server with the following command:

 $ platformatic db start

You will need a configuration file. Here is an example to get you started, +save the following as platformatic.db.json:

  {
"server": {
"hostname": "127.0.0.1",
"port": 0,
"logger": {
"level": "info"
}
},
"db": {
"connectionString": "sqlite://./db"
},
"migrations": {
"dir": "./migrations"
}
}

Remember to create a migration, run the db help migrate command to know more.

All outstanding migrations will be applied to the database unless the +migrations.autoApply configuration option is set to false.

By sending the SIGUSR2 signal, the server can be reloaded.

Options:

  • -c, --config FILE - Specify a configuration file to use.

If not specified, the configuration will be loaded from any of the following, in the current directory.

  • platformatic.db.json, or
  • platformatic.db.yml, or
  • platformatic.db.tml

You can find more details about the configuration format here:

types

Generate typescript types for your entities from the database.

  $ platformatic db types

As a result of executing this command, the Platformatic DB will generate a types +folder with a typescript file for each database entity. It will also generate a +global.d.ts file that injects the types into the Application instance.

In order to add type support to your plugins, you need to install some additional +dependencies. To do this, copy and run an npm install command with dependencies +that "platformatic db types" will ask you.

Here is an example of a platformatic plugin.js with jsdoc support. +You can use it to add autocomplete to your code.

/// <reference path="./global.d.ts" />
'use strict'

/** @param {import('fastify').FastifyInstance} app */
module.exports = async function (app) {
app.get('/movie', async () => {
const movies = await app.platformatic.entities.movie.find({
where: { title: { eq: 'The Hitchhiker\'s Guide to the Galaxy' } }
})
return movies[0].id
})
}

If not specified, the configuration will be loaded from any of the following, in the current directory.

  • platformatic.db.json, or
  • platformatic.db.yml, or
  • platformatic.db.tml

You can find more details about the configuration format here:

service

platformatic service <command>

compile

Compile typescript plugins.

  $ platformatic service compile

As a result of executing this command, Platformatic Service will compile typescript +plugins in the outDir directory.

If not specified, the configuration will be loaded from any of the following, in the current directory.

  • platformatic.service.json, or
  • platformatic.service.yml, or
  • platformatic.service.tml

You can find more details about the configuration format here:

help

Available commands:

  • help - show this help message.
  • help <command> - shows more information about a command.
  • start - start the server.
  • schema config - generate the schema configuration file.

schema

Update the config schema file:

  • schema config - update the JSON schema config available on platformatic.service.schema.json

Your configuration on platformatic.service.json has a schema defined to improve the developer experience and avoid mistakes when updating the configuration of Platformatic Service. +When you initialize a new Platformatic service (f.e. running npm create platformatic@latest), a new JSON $schema property is added in the platformatic.service.json config. This can allow your IDE to add suggestions (f.e. mandatory/missing fields, types, default values) by opening the config in platformatic.service.json. +Running platformatic service schema config you can update your schema so that it matches well the latest changes available on your config.

start

Start the Platformatic Service with the following command:

 $ platformatic service start

You will need a configuration file. Here is an example to get you started, +save the following as platformatic.service.json:

{
"server": {
"hostname": "127.0.0.1",
"port": 0,
"logger": {
"level": "info"
}
},
"plugin": {
"path": "./plugin.js"
}
}

frontend

platformatic frontend <url> <language>

Create frontend code to consume the REST APIs of a Platformatic application.

From the directory you want the frontend code to be generated (typically <YOUR_FRONTEND_APP_DIRECTORY>/src/) run -

npx platformatic frontend http://127.0.0.1:3042 ts

ℹ️

Where http://127.0.0.1:3042 must be replaced with your Platformatic application endpoint, and the language can either be ts or js. When the command is run, the Platformatic CLI generates -

  • api.d.ts - A TypeScript module that includes all the OpenAPI-related types.
  • api.ts or api.js - A module that includes a function for every single REST endpoint.

If you use the --name option it will create custom file names.

npx platformatic frontend http://127.0.0.1:3042 ts --name foobar

Will create foobar.ts and foobar-types.d.ts

Refer to the dedicated guide where the full process of generating and consuming the frontend code is described.

In case of problems, please check that:

  • The Platformatic app URL is valid.
  • The Platformatic app whose URL belongs must be up and running.
  • OpenAPI must be enabled (db.openapi in your platformatic.db.json is not set to false). You can find more details about the db configuration format here.
  • CORS must be managed in your Platformatic app (server.cors.origin.regexp in your platformatic.db.json is set to /*/, for instance). You can find more details about the cors configuration here.

runtime

platformatic runtime <command>

compile

Compile all typescript plugins for all services.

  $ platformatic runtime compile

This command will compile the TypeScript +plugins for each services registered in the runtime.

help

Available commands:

  • help - show this help message.
  • help <command> - shows more information about a command.
  • start - start the application.

start

Start the Platformatic Runtime with the following command:

 $ platformatic runtime start

start

Start a Platformatic application with the following command:

$ platformatic start

Options:

  • -c, --config <path> - Path to the configuration file.
  • --inspect[=[host:]port] - Start the Node.js debugger. host defaults to '127.0.0.1'. port defaults to 9229. Use caution when binding to a public host:port combination.
  • --inspect-brk[=[host:]port] - Start the Node.js debugger and block until a client has attached. host defaults to '127.0.0.1'. port defaults to 9229. Use caution when binding to a public host:port combination.
+ + + + \ No newline at end of file diff --git a/docs/reference/client/frontend/index.html b/docs/reference/client/frontend/index.html new file mode 100644 index 00000000000..c43266d2d8f --- /dev/null +++ b/docs/reference/client/frontend/index.html @@ -0,0 +1,17 @@ + + + + + +Frontend client | Platformatic Open Source Software + + + + + +
+
Version: 0.42.1

Frontend client

Create implementation and type files that exposes a client for a remote OpenAPI server, that uses fetch and can run in any browser.

To create a client for a remote OpenAPI API, you can use the following command:

$ platformatic frontend http://example.com/to/schema/file <language> --name <clientname>

where <language> can be either js or ts.

This will create two files clientname.js (or clientname.ts) and clientname-types.d.ts for types.

clientname by default is api

Usage

The implementation generated by the tool exports all the named operation found and a factory object.

Named operations

import { setBaseUrl, getMovies } from './api.js'

setBaseUrl('http://my-server-url.com') // modifies the global `baseUrl` variable

const movies = await getMovies({})
console.log(movies)

Factory

The factory object is called build and can be used like this

import build from './api.js'

const client = build('http://my-server-url.com')

const movies = await client.getMovies({})
console.log(movies)

You can use both named operations and the factory in the same file. They can work on different hosts, so the factory does not use the global setBaseUrl function.

Generated Code

The type file will look like this

export interface GetMoviesRequest {
'limit'?: number;
'offset'?: number;
// ... all other options
}

interface GetMoviesResponseOK {
'id': number;
'title': string;
}
export interface Api {
setBaseUrl(newUrl: string) : void;
getMovies(req: GetMoviesRequest): Promise<Array<GetMoviesResponseOK>>;
// ... all operations listed here
}

type PlatformaticFrontendClient = Omit<Api, 'setBaseUrl'>
export default function build(url: string): PlatformaticFrontendClient

The javascript implementation will look like this

let baseUrl = ''
/** @type {import('./api-types.d.ts').Api['setBaseUrl']} */
export const setBaseUrl = (newUrl) => { baseUrl = newUrl }

/** @type {import('./api-types.d.ts').Api['getMovies']} */
export const getMovies = async (request) => {
return await _getMovies(baseUrl, request)
}
async function _createMovie (url, request) {
const response = await fetch(`${url}/movies/`, {
method:'post',
body: JSON.stringify(request),
headers: {
'Content-Type': 'application/json'
}
})

if (!response.ok) {
throw new Error(await response.text())
}

return await response.json()
}

/** @type {import('./api-types.d.ts').Api['createMovie']} */
export const createMovie = async (request) => {
return await _createMovie(baseUrl, request)
}
// ...

export default function build (url) {
return {
getMovies: _getMovies.bind(url, ...arguments),
// ...
}
}

The typescript implementation will look like this

import type { Api } from './api-types'
import * as Types from './api-types'

let baseUrl = ''
export const setBaseUrl = (newUrl: string) : void => { baseUrl = newUrl }

const _getMovies = async (url: string, request: Types.GetMoviesRequest) => {
const response = await fetch(`${url}/movies/?${new URLSearchParams(Object.entries(request || {})).toString()}`)

if (!response.ok) {
throw new Error(await response.text())
}

return await response.json()
}

export const getMovies: Api['getMovies'] = async (request: Types.GetMoviesRequest) => {
return await _getMovies(baseUrl, request)
}
// ...
export default function build (url) {
return {
getMovies: _getMovies.bind(url, ...arguments),
// ...
}
}
+ + + + \ No newline at end of file diff --git a/docs/reference/client/introduction/index.html b/docs/reference/client/introduction/index.html new file mode 100644 index 00000000000..016d9c982ac --- /dev/null +++ b/docs/reference/client/introduction/index.html @@ -0,0 +1,34 @@ + + + + + +Platformatic Client | Platformatic Open Source Software + + + + + +
+
Version: 0.42.1

Platformatic Client

Create a Fastify plugin that exposes a client for a remote OpenAPI or GraphQL API.

To create a client for a remote OpenAPI API, you can use the following command:

$ platformatic client http://example.com/to/schema/file --name myclient

To create a client for a remote GraphQL API, you can use the following command:

$ platformatic client http://example.com/graphql --name myclient

Usage with Platformatic Service or Platformatic DB

If you run the generator in a Platformatic application, it will automatically extend it to load your client by editing the configuration file and adding a clients section. Then, in any part of your Platformatic application you can use the client.

You can use the client in your application in Javascript, calling a GraphQL endpoint:

// Use a typescript reference to set up autocompletion
// and explore the generated APIs.

/// <reference path="./myclient" />

/** @type {import('fastify').FastifyPluginAsync<{}>} */
module.exports = async function (app, opts) {
app.post('/', async (request, reply) => {
const res = await app.myclient.graphql({
query: 'query { movies { title } }'
})
return res
})
}

or in Typescript, calling an OpenAPI endpoint:

import { FastifyInstance } from 'fastify'
/// <reference path="./myclient" />

export default async function (app: FastifyInstance) {
app.get('/', async () => {
return app.myclient.get({})
})
}

The client configuration in the platformatic.db.json and platformatic.service.json would look like:

{
"clients": [{
"schema": "./myclient/myclient.openapi.json" // or ./myclient/myclient.schema.graphl
"name": "myclient",
"type": "openapi" // or graphql
"url": "{ PLT_MYCLIENT_URL }"
}]
}

Note that the generator would also have updated the .env and .env.sample files if they exist.

Generating a client for a service running within Platformatic Runtime

Platformatic Runtime allows you to create a network of services that are not exposed. +To create a client to invoke one of those services from another, run:

$ platformatic client --name <clientname> --runtime <serviceId>

Where <clientname> is the name of the client and <serviceId> is the id of the given service (which corresponds, in the basic case, with the folder name of that service). The generated client is identical to the one in the previous section.

Note that this command looks for a platformatic.runtime.json in a parent directory.

Example

As an example, consider a network of three microservices:

  • somber-chariot, an instance of Platformatic DB;
  • languid-noblemen, an instance of Platformatic Service;
  • pricey-paesant, an instance of Platformatic Composer, which is also the runtime entrypoint.

From within the languid-noblemen folder, we can run:

$ platformatic client --name chariot --runtime somber-chariot

The client configuration in the platformatic.db.json and platformatic.service.json would look like:

{
"clients": [{
"path": "./chariot",
"serviceId": "somber-chariot"
}]
}

Even if the client is generated from an HTTP endpoint, it is possible to add a serviceId property to each client object shown above. This is not required, but if using the Platformatic Runtime, the serviceId property will be used to identify the service dependency.

Types Generator

The types for the client are automatically generated for both OpenAPI and GraphQL schemas.

You can generate only the types with the --types-only flag.

For example

$ platformatic client http://exmaple.com/to/schema/file --name myclient --types-only

Will create the single myclient.d.ts file in current directory

OpenAPI

We provide a fully typed experience for OpenAPI, Typing both the request and response for +each individual OpenAPI operation.

Consider this example:

// Omitting all the individual Request and Response payloads for brevity

interface Client {
getMovies(req: GetMoviesRequest): Promise<Array<GetMoviesResponse>>;
createMovie(req: CreateMovieRequest): Promise<CreateMovieResponse>;
updateMovies(req: UpdateMoviesRequest): Promise<Array<UpdateMoviesResponse>>;
getMovieById(req: GetMovieByIdRequest): Promise<GetMovieByIdResponse>;
updateMovie(req: UpdateMovieRequest): Promise<UpdateMovieResponse>;
updateMovie(req: UpdateMovieRequest): Promise<UpdateMovieResponse>;
deleteMovies(req: DeleteMoviesRequest): Promise<DeleteMoviesResponse>;
getQuotesForMovie(req: GetQuotesForMovieRequest): Promise<Array<GetQuotesForMovieResponse>>;
getQuotes(req: GetQuotesRequest): Promise<Array<GetQuotesResponse>>;
createQuote(req: CreateQuoteRequest): Promise<CreateQuoteResponse>;
updateQuotes(req: UpdateQuotesRequest): Promise<Array<UpdateQuotesResponse>>;
getQuoteById(req: GetQuoteByIdRequest): Promise<GetQuoteByIdResponse>;
updateQuote(req: UpdateQuoteRequest): Promise<UpdateQuoteResponse>;
updateQuote(req: UpdateQuoteRequest): Promise<UpdateQuoteResponse>;
deleteQuotes(req: DeleteQuotesRequest): Promise<DeleteQuotesResponse>;
getMovieForQuote(req: GetMovieForQuoteRequest): Promise<GetMovieForQuoteResponse>;
}

type ClientPlugin = FastifyPluginAsync<NonNullable<client.ClientOptions>>

declare module 'fastify' {
interface FastifyInstance {
'client': Client;
}

interface FastifyRequest {
'client': Client;
}
}

declare namespace Client {
export interface ClientOptions {
url: string
}
export const client: ClientPlugin;
export { client as default };
}

declare function client(...params: Parameters<ClientPlugin>): ReturnType<ClientPlugin>;
export = client;

GraphQL

We provide a partially typed experience for GraphQL, because we do not want to limit +how you are going to query the remote system. Take a look at this example:

declare module 'fastify' {
interface GraphQLQueryOptions {
query: string;
headers: Record<string, string>;
variables: Record<string, unknown>;
}
interface GraphQLClient {
graphql<T>(GraphQLQuery): PromiseLike<T>;
}
interface FastifyInstance {
'client'
: GraphQLClient;

}

interface FastifyRequest {
'client'<T>(GraphQLQuery): PromiseLike<T>;
}
}

declare namespace client {
export interface Clientoptions {
url: string
}
export interface Movie {
'id'?: string;

'title'?: string;

'realeasedDate'?: string;

'createdAt'?: string;

'preferred'?: string;

'quotes'?: Array<Quote>;

}
export interface Quote {
'id'?: string;

'quote'?: string;

'likes'?: number;

'dislikes'?: number;

'movie'?: Movie;

}
export interface MoviesCount {
'total'?: number;

}
export interface QuotesCount {
'total'?: number;

}
export interface MovieDeleted {
'id'?: string;

}
export interface QuoteDeleted {
'id'?: string;

}
export const client: Clientplugin;
export { client as default };
}

declare function client(...params: Parameters<Clientplugin>): ReturnType<Clientplugin>;
export = client;

Since only you can know what GraphQL query you are producing, you are responsible for typing it accordingly.

Usage with standalone Fastify

If a platformatic configuration file is not found, a complete Fastify plugin is generated to be +used in your Fastify application like so:

const fastify = require('fastify')()
const client = require('./your-client-name')

fastify.register(client, {
url: 'http://example.com'
})

// GraphQL
fastify.post('/', async (request, reply) => {
const res = await request.movies.graphql({
query: 'mutation { saveMovie(input: { title: "foo" }) { id, title } }'
})
return res
})

// OpenAPI
fastify.post('/', async (request, reply) => {
const res = await request.movies.createMovie({ title: 'foo' })
return res
})

fastify.listen({ port: 3000 })

Note that you would need to install @platformatic/client as a dependency.

How are the method names defined in OpenAPI

The names of the operations are defined in the OpenAPI specification. Specifically, we use the operationId. If that's not part of the spec, the name is generated by combining the parts of the path and the method. For example, for the path /something/{param1}/ and the method GET, it generates getSomethingParam1.

Authentication

It's very common that downstream services require some form of authentication. How can we add the necessary headers? You can configure them from your plugin:

/// <reference path="./myclient" />

/** @type {import('fastify').FastifyPluginAsync<{}>} */
module.exports = async function (app, opts) {
app.configureMyclient({
async getHeaders (req, reply) {
return {
'foo': 'bar'
}
}
})

app.post('/', async (request, reply) => {
const res = await app.myclient.graphql({
query: 'query { movies { title } }'
})
return res
})
}

Telemetry propagation

To correctly propagate telemetry information, be sure to get the client from the request object, e.g.:

fastify.post('/', async (request, reply) => {
const res = await request.movies.createMovie({ title: 'foo' })
return res
})
+ + + + \ No newline at end of file diff --git a/docs/reference/client/programmatic/index.html b/docs/reference/client/programmatic/index.html new file mode 100644 index 00000000000..8bd28fb9b4b --- /dev/null +++ b/docs/reference/client/programmatic/index.html @@ -0,0 +1,17 @@ + + + + + +Programmatic API | Platformatic Open Source Software + + + + + +
+
Version: 0.42.1

Programmatic API

It is possible to use the Platformatic client without the generator.

OpenAPI Client

import { buildOpenAPIClient } from '@platformatic/client'

const client = await buildOpenAPIClient({
url: `https://yourapi.com/documentation/json`,
// path: 'path/to/openapi.json',
headers: {
'foo': 'bar'
}
})

const res = await client.yourOperationName({ foo: 'bar' })

console.log(res)

If you use Typescript you can take advantage of the generated types file

import { buildOpenAPIClient } from '@platformatic/client'
import Client from './client'
//
// interface Client {
// getMovies(req: GetMoviesRequest): Promise<Array<GetMoviesResponse>>;
// createMovie(req: CreateMovieRequest): Promise<CreateMovieResponse>;
// ...
// }
//

const client: Client = await buildOpenAPIClient<Client>({
url: `https://yourapi.com/documentation/json`,
// path: 'path/to/openapi.json',
headers: {
'foo': 'bar'
}
})

const res = await client.getMovies()
console.log(res)

GraphQL Client

import { buildGraphQLClient } from '@platformatic/client'

const client = await buildGraphQLClient({
url: `https://yourapi.com/graphql`,
headers: {
'foo': 'bar'
}
})

const res = await client.graphql({
query: `
mutation createMovie($title: String!) {
saveMovie(input: {title: $title}) {
id
title
}
}
`,
variables: {
title: 'The Matrix'
}
})

console.log(res)
+ + + + \ No newline at end of file diff --git a/docs/reference/composer/api-modification/index.html b/docs/reference/composer/api-modification/index.html new file mode 100644 index 00000000000..0fb8d58b60c --- /dev/null +++ b/docs/reference/composer/api-modification/index.html @@ -0,0 +1,19 @@ + + + + + +API modification | Platformatic Open Source Software + + + + + +
+
Version: 0.42.1

API modification

If you want to modify the automatically generated API, you can use the composer's custom onRoute hook.

addComposerOnRouteHook(openApiPath, methods, handler)

  • openApiPath (string) - A route OpenAPI path that Platformatic Composer takes from the OpenAPI specification.
  • methods (string[]) - Route HTTP methods that Platformatic Composer takes from the OpenAPI specification.
  • handler (function) - fastify onRoute hook handler.

onComposerResponse

The onComposerResponse hook is called after the response is received from a composed service. It might be useful if you want to modify the response before it is sent to the client. If you want to use it, you need to add an onComposerResponse property to the config object of the route options.

  • request (object) - fastify request object.
  • reply (object) - fastify reply object.
  • body (object) - undici response body object.

Example

app.platformatic.addComposerOnRouteHook('/users/{id}', ['GET'], routeOptions => {
routeOptions.schema.response[200] = {
type: 'object',
properties: {
firstName: { type: 'string' },
lastName: { type: 'string' }
}
}

async function onComposerResponse (request, reply, body) {
const payload = await body.json()
const newPayload = {
firstName: payload.first_name,
lastName: payload.last_name
}
reply.send(newPayload)
}
routeOptions.config.onComposerResponse = onComposerResponse
})
+ + + + \ No newline at end of file diff --git a/docs/reference/composer/configuration/index.html b/docs/reference/composer/configuration/index.html new file mode 100644 index 00000000000..43bc12d77e7 --- /dev/null +++ b/docs/reference/composer/configuration/index.html @@ -0,0 +1,38 @@ + + + + + +Configuration | Platformatic Open Source Software + + + + + +
+
Version: 0.42.1

Configuration

Platformatic Composer is configured with a configuration file. It supports the use of environment variables as setting values with configuration placeholders.

Configuration file

If the Platformatic CLI finds a file in the current working directory matching +one of these filenames, it will automatically load it:

  • platformatic.composer.json
  • platformatic.composer.json5
  • platformatic.composer.yml or platformatic.composer.yaml
  • platformatic.composer.tml or platformatic.composer.toml

Alternatively, a --config option with a configuration +filepath can be passed to most platformatic composer CLI commands.

The configuration examples in this reference use JSON.

Supported formats

FormatExtensions
JSON.json
JSON5.json5
YAML.yml, .yaml
TOML.tml, .toml

Comments are supported by the JSON5, YAML and TOML file formats.

Settings

Configuration settings are organised into the following groups:

Sensitive configuration settings containing sensitive data should be set using configuration placeholders.

server

A required object with the following settings:

  • hostname (required, string) — Hostname where Platformatic Composer server will listen for connections.

  • port (required, number) — Port where Platformatic Composer server will listen for connections.

  • healthCheck (boolean or object) — Enables the health check endpoint.

    • Powered by @fastify/under-pressure.
    • The value can be an object, used to specify the interval between checks in milliseconds (default: 5000)

    Example

    {
    "server": {
    ...
    "healthCheck": {
    "interval": 2000
    }
    }
    }
  • cors (object) — Configuration for Cross-Origin Resource Sharing (CORS) headers.

    • All options will be passed to the @fastify/cors plugin. In order to specify a RegExp object, you can pass { regexp: 'yourregexp' }, +it will be automatically converted.
  • logger (object) -- the logger configuration.

  • pluginTimeout (integer) -- the number of milliseconds to wait for a Fastify plugin to load, see the fastify docs for more details.

  • https (object) - Configuration for HTTPS supporting the following options.

    • key (required, string, object, or array) - If key is a string, it specifies the private key to be used. If key is an object, it must have a path property specifying the private key file. Multiple keys are supported by passing an array of keys.
    • cert (required, string, object, or array) - If cert is a string, it specifies the certificate to be used. If cert is an object, it must have a path property specifying the certificate file. Multiple certificates are supported by passing an array of keys.

metrics

Configuration for a Prometheus server that will export monitoring metrics +for the current server instance. It uses fastify-metrics +under the hood.

This setting can be a boolean or an object. If set to true the Prometheus server will listen on http://0.0.0.0:9090.

Supported object properties:

  • hostname (string) — The hostname where Prometheus server will listen for connections.
  • port (number) — The port where Prometheus server will listen for connections.
  • auth (object) — Basic Auth configuration. username and password are required here +(use environment variables).

plugins

An optional object that defines the plugins loaded by Platformatic Composer.

  • paths (required, array): an array of paths (string) +or an array of objects composed as follows,

    • path (string): Relative path to plugin's entry point.
    • options (object): Optional plugin options.
    • encapsulate (boolean): if the path is a folder, it instruct Platformatic to not encapsulate those plugins.
    • maxDepth (integer): if the path is a folder, it limits the depth to load the content from.
  • typescript (boolean): enable typescript compilation. A tsconfig.json file is required in the same folder.

    Example

    {
    "plugins": {
    "paths": [{
    "path": "./my-plugin.js",
    "options": {
    "foo": "bar"
    }
    }]
    }
    }

watch

Disable watching for file changes if set to false. It can also be customized with the following options:

  • ignore (string[], default: null): List of glob patterns to ignore when watching for changes. If null or not specified, ignore rule is not applied. Ignore option doesn't work for typescript files.

  • allow (string[], default: ['*.js', '**/*.js']): List of glob patterns to allow when watching for changes. If null or not specified, allow rule is not applied. Allow option doesn't work for typescript files.

    Example

    {
    "watch": {
    "ignore": ["*.mjs", "**/*.mjs"],
    "allow": ["my-plugin.js", "plugins/*.js"]
    }
    }

composer

Configure @platformatic/composer specific settings such as services or refreshTimeout:

  • services (array, default: []) — is an array of objects that defines +the services managed by the composer. Each service object supports the following settings:

    • id (required, string) - A unique identifier for the service.
    • origin (string) - A service origin. Skip this option if the service is executing inside of Platformatic Runtime. In this case, service id will be used instead of origin.
    • openapi (required, object) - The configuration file used to compose OpenAPI specification. See the openapi for details.
    • proxy (object or false) - Service proxy configuration. If false, the service proxy is disabled.
      • prefix (required, string) - Service proxy prefix. All service routes will be prefixed with this value.
    • refreshTimeout (number) - The number of milliseconds to wait for check for changes in the service OpenAPI specification. If not specified, the default value is 1000.

openapi

  • url (string) - A path of the route that exposes the OpenAPI specification. If a service is a Platformatic Service or Platformatic DB, use /documentation/json as a value. Use this or file option to specify the OpenAPI specification.
  • file (string) - A path to the OpenAPI specification file. Use this or url option to specify the OpenAPI specification.
  • prefix (string) - A prefix for the OpenAPI specification. All service routes will be prefixed with this value.
  • config (string) - A path to the OpenAPI configuration file. This file is used to customize the OpenAPI specification. See the openapi-configuration for details.
openapi-configuration

The OpenAPI configuration file is a JSON file that is used to customize the OpenAPI specification. It supports the following options:

  • ignore (boolean) - If true, the route will be ignored by the composer. +If you want to ignore a specific method, use the ignore option in the nested method object.

    Example

    {
    "paths": {
    "/users": {
    "ignore": true
    },
    "/users/{id}": {
    "get": { "ignore": true },
    "put": { "ignore": true }
    }
    }
    }
  • alias (string) - Use it to create an alias for the route path. The original route path will be ignored.

    Example

    {
    "paths": {
    "/users": {
    "alias": "/customers"
    }
    }
    }
  • rename (string) - Use it to rename composed route response fields. +Use json schema format to describe the response structure. For now it works only for 200 response.

    Example

    {
    "paths": {
    "/users": {
    "responses": {
    "200": {
    "type": "array",
    "items": {
    "type": "object",
    "properties": {
    "id": { "rename": "user_id" },
    "name": { "rename": "first_name" }
    }
    }
    }
    }
    }
    }
    }

Examples

Composition of two remote services:

{
"composer": {
"services": [
{
"id": "auth-service",
"origin": "https://auth-service.com",
"openapi": {
"url": "/documentation/json",
"prefix": "auth"
}
},
{
"id": "payment-service",
"origin": "https://payment-service.com",
"openapi": {
"file": "./schemas/payment-service.json"
}
}
],
"refreshTimeout": 1000
}
}

Composition of two local services inside of Platformatic Runtime:

{
"composer": {
"services": [
{
"id": "auth-service",
"openapi": {
"url": "/documentation/json",
"prefix": "auth"
}
},
{
"id": "payment-service",
"openapi": {
"file": "./schemas/payment-service.json"
}
}
],
"refreshTimeout": 1000
}
}

telemetry

Open Telemetry is optionally supported with these settings:

  • serviceName (required, string) — Name of the service as will be reported in open telemetry.
  • version (string) — Optional version (free form)
  • skip (array). Optional list of operations to skip when exporting telemetry defined object with properties:
    • method: GET, POST, PUT, DELETE, PATCH, HEAD, OPTIONS, TRACE
    • path. e.g.: /documentation/json
  • exporter (object or array) — Exporter configuration. If not defined, the exporter defaults to console. If an array of objects is configured, every object must be a valid exporter object. The exporter object has the following properties:
    • type (string) — Exporter type. Supported values are console, otlp, zipkin and memory (default: console). memory is only supported for testing purposes.
    • options (object) — These options are supported:
      • url (string) — The URL to send the telemetry to. Required for otlp exporter. This has no effect on console and memory exporters.
      • headers (object) — Optional headers to send with the telemetry. This has no effect on console and memory exporters.

Note that OTLP traces can be consumed by different solutions, like Jaeger. Here the full list.

Example

{
"telemetry": {
"serviceName": "test-service",
"exporter": {
"type": "otlp",
"options": {
"url": "http://localhost:4318/v1/traces"
}
}
}
}

Environment variable placeholders

The value for any configuration setting can be replaced with an environment variable +by adding a placeholder in the configuration file, for example {PLT_SERVER_LOGGER_LEVEL}.

All placeholders in a configuration must be available as an environment variable +and must meet the allowed placeholder name rules.

Example

platformatic.service.json
{
"server": {
"port": "{PORT}"
}
}

Platformatic will replace the placeholders in this example with the environment +variables of the same name.

Setting environment variables

If a .env file exists it will automatically be loaded by Platformatic using +dotenv. For example:

.env
PLT_SERVER_LOGGER_LEVEL=info
PORT=8080

The .env file must be located in the same folder as the Platformatic configuration +file or in the current working directory.

Environment variables can also be set directly on the command line, for example:

PLT_SERVER_LOGGER_LEVEL=debug npx platformatic composer

Allowed placeholder names

Only placeholder names prefixed with PLT_, or that are in this allow list, will be +dynamically replaced in the configuration file:

  • PORT

This restriction is to avoid accidentally exposing system environment variables. +An error will be raised by Platformatic if it finds a configuration placeholder +that isn't allowed.

The default allow list can be extended by passing a --allow-env CLI option with a +comma separated list of strings, for example:

npx platformatic composer --allow-env=HOST,SERVER_LOGGER_LEVEL

If --allow-env is passed as an option to the CLI, it will be merged with the +default allow list.

+ + + + \ No newline at end of file diff --git a/docs/reference/composer/introduction/index.html b/docs/reference/composer/introduction/index.html new file mode 100644 index 00000000000..8780b1f1fd6 --- /dev/null +++ b/docs/reference/composer/introduction/index.html @@ -0,0 +1,22 @@ + + + + + +Platformatic Composer | Platformatic Open Source Software + + + + + +
+
Version: 0.42.1

Platformatic Composer

Platformatic Composer is an HTTP server that automatically aggregates multiple +services APIs into a single API.

info

Platformatic Composer is currently in public beta.

Features

Public beta

Platformatic Composer is in public beta. You can use it in production, but it's quite +likely that you'll encounter significant bugs.

If you run into a bug or have a suggestion for improvement, please +raise an issue on GitHub.

Standalone usage

If you're only interested in the features available in Platformatic Composer, you can replace platformatic with @platformatic/composer in the dependencies of your package.json, so that you'll import fewer deps.

Example configuration file

The following configuration file can be used to start a new Platformatic +Composer project. For more details on the configuration file, see the +configuration documentation.

{
"$schema": "https://platformatic.dev/schemas/v0.26.0/composer",
"server": {
"hostname": "127.0.0.1",
"port": 0,
"logger": {
"level": "info"
}
},
"composer": {
"services": [
{
"id": "auth-service",
"origin": "https://auth-service.com",
"openapi": {
"url": "/documentation/json",
"prefix": "auth"
}
},
{
"id": "payment-service",
"origin": "https://payment-service.com",
"openapi": {
"url": "/documentation/json"
}
}
],
"refreshTimeout": 1000
},
"watch": true
}
+ + + + \ No newline at end of file diff --git a/docs/reference/composer/plugin/index.html b/docs/reference/composer/plugin/index.html new file mode 100644 index 00000000000..47a925bfe00 --- /dev/null +++ b/docs/reference/composer/plugin/index.html @@ -0,0 +1,18 @@ + + + + + +Plugin | Platformatic Open Source Software + + + + + +
+
Version: 0.42.1

Plugin

If you want to add features to a service, you will need to register a plugin, which will be in the form of a standard Fastify plugin.

The config file will specify where the plugin file is located as the example below:

{
...
"plugins": {
"paths": ["./plugin/index.js"]
}
}

The path is relative to the config file path.

You should export an async function which receives the following parameters:

  • app (FastifyInstance) that is the main fastify instance
  • opts all the options specified in the config file after path

Hot Reload

Plugin file is being watched by fs.watch function.

You don't need to reload Platformatic Composer server while working on your plugin. Every time you save, the watcher will trigger a reload event and the server will auto-restart and load your updated code.

tip

At this time, on Linux, file watch in subdirectories is not supported due to a Node.js limitation (documented here).

Directories

The path can also be a directory. In that case, the directory will be loaded with @fastify/autoload.

Consider the following directory structure:

├── routes
│ ├── foo
│ │ ├── something.js
│ │ └── bar
│ │ └── baz.js
│ ├── single-plugin
│ │ └── utils.js
│ └── another-plugin.js
└── platformatic.composer.json

By default the folder will be added as a prefix to all the routes defined within them. +See the autoload documentation for all the options to customize this behavior.

Multiple plugins

Multiple plugins can be loaded in parallel by specifying an array:

{
...
"plugins": {
"paths": [{
"path": "./plugin/index.js"
}, {
"path": "./routes/"
}]
}
}
+ + + + \ No newline at end of file diff --git a/docs/reference/composer/programmatic/index.html b/docs/reference/composer/programmatic/index.html new file mode 100644 index 00000000000..3017c0f98bf --- /dev/null +++ b/docs/reference/composer/programmatic/index.html @@ -0,0 +1,18 @@ + + + + + +Programmatic API | Platformatic Open Source Software + + + + + +
+
Version: 0.42.1

Programmatic API

In many cases it's useful to start Platformatic Composer using an API instead of +command line, e.g. in tests we want to start and stop our server.

The buildServer function allows that:

import { buildServer } from '@platformatic/composer'

const app = await buildServer('path/to/platformatic.composer.json')
await app.start()

const res = await fetch(app.url)
console.log(await res.json())

// do something

await app.close()

It is also possible to customize the configuration:

import { buildServer } from '@platformatic/composer'

const app = await buildServer({
server: {
hostname: '127.0.0.1',
port: 0
},
services: [
{
id: 'auth-service',
origin: 'https://auth-service.com',
openapi: {
url: '/documentation/json',
prefix: 'auth'
}
},
{
id: 'payment-service',
origin: 'https://payment-service.com',
openapi: {
file: './schemas/payment-service.json'
}
}
]
})

await app.start()

const res = await fetch(app.url)
console.log(await res.json())

// do something

await app.close()
+ + + + \ No newline at end of file diff --git a/docs/reference/db/authorization/introduction/index.html b/docs/reference/db/authorization/introduction/index.html new file mode 100644 index 00000000000..b0a0de45da3 --- /dev/null +++ b/docs/reference/db/authorization/introduction/index.html @@ -0,0 +1,21 @@ + + + + + +Authorization | Platformatic Open Source Software + + + + + +
+
Version: 0.42.1

Authorization

Introduction

Authorization in Platformatic DB is role-based. User authentication and the +assignment of roles must be handled by an external authentication service.

Configuration

Authorization strategies and rules are configured via a Platformatic DB +configuration file. See the Platformatic DB Configuration +documentation for the supported settings.

Bypass authorization in development

To make testing and developing easier, it's possible to bypass authorization checks +if an adminSecret is set. See the HTTP headers (development only) documentation.

+ + + + \ No newline at end of file diff --git a/docs/reference/db/authorization/rules/index.html b/docs/reference/db/authorization/rules/index.html new file mode 100644 index 00000000000..76adeb0ff4f --- /dev/null +++ b/docs/reference/db/authorization/rules/index.html @@ -0,0 +1,28 @@ + + + + + +Rules | Platformatic Open Source Software + + + + + +
+
Version: 0.42.1

Rules

Introduction

Authorization rules can be defined to control what operations users are +able to execute via the REST or GraphQL APIs that are exposed by a Platformatic +DB app.

Every rule must specify:

  • role (required) — A role name. It's a string and must match with the role(s) set by an external authentication service.
  • entity (optional) — The Platformatic DB entity to apply this rule to.
  • entities (optional) — The Platformatic DB entities to apply this rule to.
  • defaults (optional) — Configure entity fields that will be +automatically set from user data.
  • One entry for each supported CRUD operation: find, save, delete

One of entity and entities must be specified.

Operation checks

Every entity operation — such as find, insert, save or delete — can have +authorization checks specified for them. This value can be false (operation disabled) +or true (operation enabled with no checks).

To specify more fine-grained authorization controls, add a checks field, e.g.:

{
"role": "user",
"entity": "page",
"find": {
"checks": {
"userId": "X-PLATFORMATIC-USER-ID"
}
},
...
}

In this example, when a user with a user role executes a findPage, they can +access all the data that has userId equal to the value in user metadata with +key X-PLATFORMATIC-USER-ID.

Note that "userId": "X-PLATFORMATIC-USER-ID" is syntactic sugar for:

      "find": {
"checks": {
"userId": {
"eq": "X-PLATFORMATIC-USER-ID"
}
}
}

It's possible to specify more complex rules using all the supported where clause operators.

Note that userId MUST exist as a field in the database table to use this feature.

GraphQL events and subscriptions

Platformatic DB supports GraphQL subscriptions and therefore db-authorization must protect them. +The check is performed based on the find permissions, the only permissions that are supported are:

  1. find: false, the subscription for that role is disabled
  2. find: { checks: { [prop]: 'X-PLATFORMATIC-PROP' } } validates that the given prop is equal
  3. find: { checks: { [prop]: { eq: 'X-PLATFORMATIC-PROP' } } } validates that the given prop is equal

Conflicting rules across roles for different equality checks will not be supported.

Restrict access to entity fields

If a fields array is present on an operation, Platformatic DB restricts the columns on which the user can execute to that list. +For save operations, the configuration must specify all the not-nullable fields (otherwise, it would fail at runtime). +Platformatic does these checks at startup.

Example:

    "rule": {
"entity": "page",
"role": "user",
"find": {
"checks": {
"userId": "X-PLATFORMATIC-USER-ID"
},
"fields": ["id", "title"]
}
...
}

In this case, only id and title are returned for a user with a user role on the page entity.

Set entity fields from user metadata

Defaults are used in database inserts: they are fields that are automatically populated from user metadata, e.g.:

        "defaults": {
"userId": "X-PLATFORMATIC-USER-ID"
},

When an entity is created, the userId column is used and populated using the value from user metadata.

Programmatic rules

If it's necessary to have more control over the authorizations, it's possible to specify the rules programmatically, e.g.:


app.register(auth, {
jwt: {
secret: 'supersecret'
},
rules: [{
role: 'user',
entity: 'page',
async find ({ user, ctx, where }) {
return {
...where,
userId: {
eq: user['X-PLATFORMATIC-USER-ID']
}
}
},
async delete ({ user, ctx, where }) {
return {
...where,
userId: {
eq: user['X-PLATFORMATIC-USER-ID']
}
}
},
defaults: {
userId: async function ({ user, ctx, input }) {
match(user, {
'X-PLATFORMATIC-USER-ID': generated.shift(),
'X-PLATFORMATIC-ROLE': 'user'
})
return user['X-PLATFORMATIC-USER-ID']
}

},
async save ({ user, ctx, where }) {
return {
...where,
userId: {
eq: user['X-PLATFORMATIC-USER-ID']
}
}
}
}]
})

In this example, the user role can delete all the posts edited before yesterday:

 app.register(auth, {
jwt: {
secret: 'supersecret'
},
roleKey: 'X-PLATFORMATIC-ROLE',
anonymousRole: 'anonymous',
rules: [{
role: 'user',
entity: 'page',
find: true,
save: true,
async delete ({ user, ctx, where }) {
return {
...where,
editedAt: {
lt: yesterday
}
}
},
defaults: {
userId: 'X-PLATFORMATIC-USER-ID'
}
}]
})

Access validation on entity mapper for plugins

To assert that a specific user with its role(s) has the correct access rights to use entities in a Platformatic plugin, the context should be passed to the entity mapper in order to verify its permissions like this:

//plugin.js

app.post('/', async (req, reply) => {
const ctx = req.createPlatformaticCtx()

await app.platformatic.entities.movie.find({
where: { /*...*/ },
ctx
})
})

Skip authorization rules

In custom plugins, it's possible to skip the authorization rules on entities programmatically by setting the skipAuth flag to true or not passing a ctx, e.g.:

// this works even if the user's role doesn't have the `find` permission.
const result = await app.platformatic.entities.page.find({skipAuth: true, ...})

This has the same effect:

// this works even if the user's role doesn't have the `find` permission
const result = await app.platformatic.entities.page.find() // no `ctx`

This is useful for custom plugins for which the authentication is not necessary, so there is no user role set when invoked.

info

Skip authorization rules is not possible on the automatically generated REST and GraphQL APIs.

Avoid repetition of the same rule multiple times

Very often we end up writing the same rules over and over again. +Instead, it's possible to condense the rule for multiple entities on a single entry:

 app.register(auth, {
jwt: {
secret: 'supersecret'
},
roleKey: 'X-PLATFORMATIC-ROLE',
anonymousRole: 'anonymous',
rules: [{
role: 'anonymous',
entities: ['category', 'page'],
find: true,
delete: false,
save: false
}]
})
+ + + + \ No newline at end of file diff --git a/docs/reference/db/authorization/strategies/index.html b/docs/reference/db/authorization/strategies/index.html new file mode 100644 index 00000000000..762d5fa0612 --- /dev/null +++ b/docs/reference/db/authorization/strategies/index.html @@ -0,0 +1,40 @@ + + + + + +Strategies | Platformatic Open Source Software + + + + + +
+
Version: 0.42.1

Strategies

Introduction

Platformatic DB supports the following authorization strategies:

JSON Web Token (JWT)

The JSON Web Token (JWT) authorization strategy is built on top +of the @fastify/jwt Fastify plugin.

Platformatic DB JWT integration

To configure it, the quickest way is to pass a shared secret in your +Platformatic DB configuration file, for example:

platformatic.db.json
{
"authorization": {
"jwt": {
"secret": "<shared-secret>"
}
}
}

By default @fastify/jwt looks for a JWT in an HTTP request's Authorization +header. This requires HTTP requests to the Platformatic DB API to include an +Authorization header like this:

Authorization: Bearer <token>

See the @fastify/jwt documentation +for all of the available configuration options.

JSON Web Key Sets (JWKS)

The JWT authorization strategy includes support for JSON Web Key Sets.

To configure it:

platformatic.db.json
{
"authorization": {
"jwt": {
"jwks": {
"allowedDomains": [
"https://ISSUER_DOMAIN"
]
}
}
}
}

When a JSON Web Token is included in a request to Platformatic DB, it retrieves the +correct public key from https://ISSUER_DOMAIN/.well-known/jwks.json and uses it to +verify the JWT signature. The token carries all the information, like the kid, +which is the key id used to sign the token itself, so no other configuration is required.

JWKS can be enabled without any options:

platformatic.db.json
{
"authorization": {
"jwt": {
"jwks": true
}
}
}

When configured like this, the JWK URL is calculated from the iss (issuer) field of the JWT, so +every JWT token from an issuer that exposes a valid JWKS endpoint will pass the validation. +This configuration should only be used in development, while +in every other case the allowedDomains option should be specified.

Any option supported by the get-jwks +library can be specified in the authorization.jwt.jwks object.

JWT Custom Claim Namespace

JWT claims can be namespaced to avoid name collisions. If so, we will receive tokens +with custom claims such as: https://platformatic.dev/X-PLATFORMATIC-ROLE +(where https://platformatic.dev/ is the namespace). +If we want to map these claims to user metadata removing our namespace, we can +specify the namespace in the JWT options:

platformatic.db.json
{
"authorization": {
"jwt": {
"namespace": "https://platformatic.dev/"
}
}
}

With this configuration, the https://platformatic.dev/X-PLATFORMATIC-ROLE claim +is mapped to X-PLATFORMATIC-ROLE user metadata.

Webhook

Platformatic DB can use a webhook to authenticate requests.

Platformatic DB Webhook integration

In this case, the URL is configured on authorization:

platformatic.db.json
{
"authorization": {
"webhook": {
"url": "<webhook url>"
}
}
}

When a request is received, Platformatic sends a POST to the webhook, replicating +the same body and headers, except for:

  • host
  • connection

In the Webhook case, the HTTP response contains the roles/user information as HTTP headers.

HTTP headers (development only)

danger

Passing an admin API key via HTTP headers is highly insecure and should only be used +during development or within protected networks.

If a request has X-PLATFORMATIC-ADMIN-SECRET HTTP header set with a valid adminSecret +(see configuration reference) the +role is set automatically as platformatic-admin, unless a different role is set for +user impersonation (which is disabled if JWT or Webhook are set, see below).

Platformatic DB HTTP Headers

Also, the following rule is automatically added to every entity, allowing the user +that presented the adminSecret to perform any operation on any entity:

{
"role": "platformatic-admin",
"find": false,
"delete": false,
"save": false
}
+ + + + \ No newline at end of file diff --git a/docs/reference/db/authorization/user-roles-metadata/index.html b/docs/reference/db/authorization/user-roles-metadata/index.html new file mode 100644 index 00000000000..b2c63506386 --- /dev/null +++ b/docs/reference/db/authorization/user-roles-metadata/index.html @@ -0,0 +1,31 @@ + + + + + +User Roles & Metadata | Platformatic Open Source Software + + + + + +
+
Version: 0.42.1

User Roles & Metadata

Introduction

Roles and user information are passed to Platformatic DB from an external +authentication service as a string (JWT claims or HTTP headers). We refer to +this data as user metadata.

Roles

Users can have a list of roles associated with them. These roles can be specified +in an X-PLATFORMATIC-ROLE property as a list of comma separated role names +(the key name is configurable).

Note that role names are just strings.

Reserved roles

Some special role names are reserved by Platformatic DB:

  • platformatic-admin : this identifies a user who has admin powers
  • anonymous: set automatically when no roles are associated

Anonymous role

If a user has no role, the anonymous role is assigned automatically. It's possible +to specify rules to apply to users with this role:

    {
"role": "anonymous",
"entity": "page",
"find": false,
"delete": false,
"save": false
}

In this case, a user that has no role or explicitly has the anonymous role +cannot perform any operations on the page entity.

Role impersonation

If a request includes a valid X-PLATFORMATIC-ADMIN-SECRET HTTP header it is +possible to impersonate a user's roles. The roles to impersonate can be specified +by sending a X-PLATFORMATIC-ROLE HTTP header containing a comma separated list +of roles.

note

When JWT or Webhook are set, user role impersonation is not enabled, and the role +is always set as platformatic-admin automatically if the X-PLATFORMATIC-ADMIN-SECRET +HTTP header is specified.

Role configuration

The roles key in user metadata defaults to X-PLATFORMATIC-ROLE. It's possible to change it using the roleKey field in configuration. Same for the anonymous role, which value can be changed using anonymousRole.

 "authorization": {
"roleKey": "X-MYCUSTOM-ROLE_KEY",
"anonymousRole": "anonym",
"rules": [
...
]
}

User metadata

User roles and other user data, such as userId, are referred to by Platformatic +DB as user metadata.

User metadata is parsed from an HTTP request and stored in a user object on the +Fastify request object. This object is populated on-demand, but it's possible +to populate it explicitly with await request.setupDBAuthorizationUser().

+ + + + \ No newline at end of file diff --git a/docs/reference/db/configuration/index.html b/docs/reference/db/configuration/index.html new file mode 100644 index 00000000000..fb3f12cee31 --- /dev/null +++ b/docs/reference/db/configuration/index.html @@ -0,0 +1,59 @@ + + + + + +Configuration | Platformatic Open Source Software + + + + + +
+
Version: 0.42.1

Configuration

Platformatic DB is configured with a configuration file. It supports the use +of environment variables as setting values with configuration placeholders.

Configuration file

If the Platformatic CLI finds a file in the current working directory matching +one of these filenames, it will automatically load it:

  • platformatic.db.json
  • platformatic.db.json5
  • platformatic.db.yml or platformatic.db.yaml
  • platformatic.db.tml or platformatic.db.toml

Alternatively, a --config option with a configuration +filepath can be passed to most platformatic db CLI commands.

The configuration examples in this reference use JSON.

Supported formats

FormatExtensions
JSON.json
JSON5.json5
YAML.yml, .yaml
TOML.tml

Comments are supported by the JSON5, YAML and TOML file formats.

Settings

Configuration settings are organised into the following groups:

Sensitive configuration settings, such as a database connection URL that contains +a password, should be set using configuration placeholders.

db

A required object with the following settings:

  • connectionString (required, string) — Database connection URL.

    • Example: postgres://user:password@my-database:5432/db-name
  • schema (array of string) - Currently supported only for postgres, schemas used to look for entities. If not provided, the default public schema is used.

    Examples

  "db": {
"connectionString": "(...)",
"schema": [
"schema1", "schema2"
],
...

},

  • Platformatic DB supports MySQL, MariaDB, PostgreSQL and SQLite.

  • graphql (boolean or object, default: true) — Controls the GraphQL API interface, with optional GraphiQL UI.

    Examples

    Enables GraphQL support

    {
    "db": {
    ...
    "graphql": true
    }
    }

    Enables GraphQL support with GraphiQL

    {
    "db": {
    ...
    "graphql": {
    "graphiql": true
    }
    }
    }

    It's possible to selectively ignore entities:

    {
    "db": {
    ...
    "graphql": {
    "ignore": {
    "categories": true
    }
    }
    }
    }

    It's possible to selectively ignore fields:

    {
    "db": {
    ...
    "graphql": {
    "ignore": {
    "categories": {
    "name": true
    }
    }
    }
    }
    }

    It's possible to add a custom GraphQL schema during the startup:

    {
    "db": {
    ...
    "graphql": {
    "schemaPath": "path/to/schema.graphql"
    }
    }
    }
    }
  • openapi (boolean or object, default: true) — Enables OpenAPI REST support.

    • If value is an object, all OpenAPI v3 allowed properties can be passed. Also a prefix property can be passed to set the OpenAPI prefix.
    • Platformatic DB uses @fastify/swagger under the hood to manage this configuration.

    Examples

    Enables OpenAPI

    {
    "db": {
    ...
    "openapi": true
    }
    }

    Enables OpenAPI with prefix

    {
    "db": {
    ...
    "openapi": {
    "prefix": "/api"
    }
    }
    }

    Enables OpenAPI with options

    {
    "db": {
    ...
    "openapi": {
    "info": {
    "title": "Platformatic DB",
    "description": "Exposing a SQL database as REST"
    }
    }
    }
    }

    You can for example add the security section, so that Swagger will allow you to add the authentication header to your requests. +In the following code snippet, we're adding a Bearer token in the form of a JWT:

    {
    "db": {
    ...
    "openapi": {
    ...
    "security": [{ "bearerAuth": [] }],
    "components": {
    "securitySchemes": {
    "bearerAuth": {
    "type": "http",
    "scheme": "bearer",
    "bearerFormat": "JWT"
    }
    }
    }
    }
    }
    }

    It's possible to selectively ignore entities:

    {
    "db": {
    ...
    "openapi": {
    "ignore": {
    "categories": true
    }
    }
    }
    }

    It's possible to selectively ignore fields:

    {
    "db": {
    ...
    "openapi": {
    "ignore": {
    "categories": {
    "name": true
    }
    }
    }
    }
    }
  • ignore (object) — Key/value object that defines which database tables should not be mapped as API entities.

    Examples

    {
    "db": {
    ...
    "ignore": {
    "versions": true // "versions" table will be not mapped with GraphQL/REST APIs
    }
    }
    }
  • events (boolean or object, default: true) — Controls the support for events published by the SQL mapping layer. +If enabled, this option adds support for GraphQL Subscription over WebSocket. By default it uses an in-process message broker. +It's possible to configure it to use Redis instead.

    Examples

    {
    "db": {
    ...
    "events": {
    "connectionString": "redis://:password@redishost.com:6380/"
    }
    }
    }
  • schemalock (boolean or object, default: false) — Controls the caching of the database schema on disk. +If set to true the database schema metadata is stored inside a schema.lock file. +It's also possible to configure the location of that file by specifying a path, like so:

    Examples

    {
    "db": {
    ...
    "schemalock": {
    "path": "./dbmetadata"
    }
    }
    }

    Starting Platformatic DB or running a migration will automatically create the schemalock file.

metrics

Configuration for a Prometheus server that will export monitoring metrics +for the current server instance. It uses fastify-metrics +under the hood.

This setting can be a boolean or an object. If set to true the Prometheus server will listen on http://0.0.0.0:9090.

Supported object properties:

  • hostname (string) — The hostname where Prometheus server will listen for connections.
  • port (number) — The port where Prometheus server will listen for connections.
  • auth (object) — Basic Auth configuration. username and password are required here +(use environment variables).

migrations

Configures Postgrator to run migrations against the database.

An optional object with the following settings:

  • dir (required, string): Relative path to the migrations directory.
  • autoApply (boolean, default: false): Automatically apply migrations when Platformatic DB server starts.

plugins

An optional object that defines the plugins loaded by Platformatic DB.

  • paths (required, array): an array of paths (string) +or an array of objects composed as follows,
    • path (string): Relative path to plugin's entry point.
    • options (object): Optional plugin options.
    • encapsulate (boolean): if the path is a folder, it instructs Platformatic to not +encapsulate those plugins, +allowing decorators and hooks to be shared across all routes.
    • maxDepth (integer): if the path is a folder, it limits the depth to load the content from.
  • typescript (boolean or object): enable TypeScript compilation. A tsconfig.json file is required in the same folder.
{
"plugins": {
"paths": [{
"path": "./my-plugin.js",
"options": {
"foo": "bar"
}
}]
}
}

typescript compilation options

The typescript option can also be an object to customize the compilation. Here are the supported options:

  • enabled (boolean): enables compilation
  • tsConfig (string): path to the tsconfig.json file relative to the configuration
  • outDir (string): the output directory of tsconfig.json, in case tsconfig.json is not available +and enabled is set to false (production build)
  • flags (array of string): flags to be passed to tsc. Overrides tsConfig. +

Example:

{
"plugins": {
"paths": [{
"path": "./my-plugin.js",
"options": {
"foo": "bar"
}
}],
"typescript": {
"enabled": false,
"tsConfig": "./path/to/tsconfig.json",
"outDir": "dist"
}
}
}

watch

Disable watching for file changes if set to false. It can also be customized with the following options:

  • ignore (string[], default: null): List of glob patterns to ignore when watching for changes. If null or not specified, ignore rule is not applied. Ignore option doesn't work for typescript files.

  • allow (string[], default: ['*.js', '**/*.js']): List of glob patterns to allow when watching for changes. If null or not specified, allow rule is not applied. Allow option doesn't work for typescript files.

    Example

    {
    "watch": {
    "ignore": ["*.mjs", "**/*.mjs"],
    "allow": ["my-plugin.js", "plugins/*.js"]
    }
    }

server

A required object with the following settings:

  • hostname (required, string) — Hostname where Platformatic DB server will listen for connections.

  • port (required, number) — Port where Platformatic DB server will listen for connections.

  • healthCheck (boolean or object) — Enables the health check endpoint.

    • Powered by @fastify/under-pressure.
    • The value can be an object, used to specify the interval between checks in milliseconds (default: 5000)

    Example

    {
    "server": {
    ...
    "healthCheck": {
    "interval": 2000
    }
    }
    }
  • cors (object) — Configuration for Cross-Origin Resource Sharing (CORS) headers.

    • All options will be passed to the @fastify/cors plugin. In order to specify a RegExp object, you can pass { regexp: 'yourregexp' }, +it will be automatically converted
  • https (object) - Configuration for HTTPS supporting the following options.

    • key (required, string, object, or array) - If key is a string, it specifies the private key to be used. If key is an object, it must have a path property specifying the private key file. Multiple keys are supported by passing an array of keys.
    • cert (required, string, object, or array) - If cert is a string, it specifies the certificate to be used. If cert is an object, it must have a path property specifying the certificate file. Multiple certificates are supported by passing an array of keys.
  • logger (object) -- the logger configuration.

  • pluginTimeout (integer) -- the number of milliseconds to wait for a Fastify plugin to load

  • bodyLimit (integer) -- the maximum request body size in bytes

  • maxParamLength (integer) -- the maximum length of a request parameter

  • caseSensitive (boolean) -- if true, the router will be case sensitive

  • ignoreTrailingSlash (boolean) -- if true, the router will ignore the trailing slash

  • connectionTimeout (integer) -- the milliseconds to wait for a new HTTP request

  • keepAliveTimeout (integer) -- the milliseconds to wait for a keep-alive HTTP request

  • maxRequestsPerSocket (integer) -- the maximum number of requests per socket

  • forceCloseConnections (boolean or "idle") -- if true, the server will close all connections when it is closed

  • requestTimeout (integer) -- the milliseconds to wait for a request to be completed

  • disableRequestLogging (boolean) -- if true, the request logger will be disabled

  • exposeHeadRoutes (boolean) -- if true, the router will expose HEAD routes

  • serializerOpts (object) -- the serializer options

  • requestIdHeader (string or false) -- the name of the header that will contain the request id

  • requestIdLogLabel (string) -- Defines the label used for the request identifier when logging the request. default: 'reqId'

  • jsonShorthand (boolean) -- default: true -- visit fastify docs for more details

  • trustProxy (boolean or integer or string or String[]) -- default: false -- visit fastify docs for more details

tip

See the fastify docs for more details.

authorization

An optional object with the following settings:

  • adminSecret (string): A secret that should be sent in an +x-platformatic-admin-secret HTTP header when performing GraphQL/REST API +calls. Use an environment variable placeholder +to securely provide the value for this setting.
  • roleKey (string, default: X-PLATFORMATIC-ROLE): The name of the key in user +metadata that is used to store the user's roles. See Role configuration.
  • anonymousRole (string, default: anonymous): The name of the anonymous role. See Role configuration.
  • jwt (object): Configuration for the JWT authorization strategy. +Any option accepted by @fastify/jwt +can be passed in this object.
  • webhook (object): Configuration for the Webhook authorization strategy.
    • url (required, string): Webhook URL that Platformatic DB will make a +POST request to.
  • rules (array): Authorization rules that describe the CRUD actions that +users are allowed to perform against entities. See Rules +documentation.
note

If an authorization object is present, but no rules are specified, no CRUD +operations are allowed unless adminSecret is passed.

Example

platformatic.db.json
{
"authorization": {
"jwt": {
"secret": "{PLT_AUTHORIZATION_JWT_SECRET}"
},
"rules": [
...
]
}
}

telemetry

Open Telemetry is optionally supported with these settings:

  • serviceName (required, string) — Name of the service as will be reported in open telemetry.
  • version (string) — Optional version (free form)
  • skip (array). Optional list of operations to skip when exporting telemetry defined object with properties:
    • method: GET, POST, PUT, DELETE, PATCH, HEAD, OPTIONS, TRACE
    • path. e.g.: /documentation/json
  • exporter (object or array) — Exporter configuration. If not defined, the exporter defaults to console. If an array of objects is configured, every object must be a valid exporter object. The exporter object has the following properties:
    • type (string) — Exporter type. Supported values are console, otlp, zipkin and memory (default: console). memory is only supported for testing purposes.
    • options (object) — These options are supported:
      • url (string) — The URL to send the telemetry to. Required for otlp exporter. This has no effect on console and memory exporters.
      • headers (object) — Optional headers to send with the telemetry. This has no effect on console and memory exporters.

Note that OTLP traces can be consumed by different solutions, like Jaeger. Here the full list.

Example

{
"telemetry": {
"serviceName": "test-service",
"exporter": {
"type": "otlp",
"options": {
"url": "http://localhost:4318/v1/traces"
}
}
}
}

Environment variable placeholders

The value for any configuration setting can be replaced with an environment variable +by adding a placeholder in the configuration file, for example {PLT_SERVER_LOGGER_LEVEL}.

All placeholders in a configuration must be available as an environment variable +and must meet the allowed placeholder name rules.

Example

platformatic.db.json
{
"db": {
"connectionString": "{DATABASE_URL}"
},
"server": {
"logger": {
"level": "{PLT_SERVER_LOGGER_LEVEL}"
},
"port": "{PORT}"
}
}

Platformatic will replace the placeholders in this example with the environment +variables of the same name.

Setting environment variables

If a .env file exists it will automatically be loaded by Platformatic using +dotenv. For example:

.env
PLT_SERVER_LOGGER_LEVEL=info
PORT=8080

The .env file must be located in the same folder as the Platformatic configuration +file or in the current working directory.

Environment variables can also be set directly on the command line, for example:

PLT_SERVER_LOGGER_LEVEL=debug npx platformatic db

Allowed placeholder names

Only placeholder names prefixed with PLT_, or that are in this allow list, will be +dynamically replaced in the configuration file:

  • PORT
  • DATABASE_URL

This restriction is to avoid accidentally exposing system environment variables. +An error will be raised by Platformatic if it finds a configuration placeholder +that isn't allowed.

The default allow list can be extended by passing a --allow-env CLI option with a +comma separated list of strings, for example:

npx platformatic db start --allow-env=HOST,SERVER_LOGGER_LEVEL
# OR
npx platformatic start --allow-env=HOST,SERVER_LOGGER_LEVEL

If --allow-env is passed as an option to the CLI, it will be merged with the +default allow list.

Sample Configuration

This is a bare minimum configuration for Platformatic DB. Uses a local ./db.sqlite SQLite database, with OpenAPI and GraphQL support.

Server will listen to http://127.0.0.1:3042

{
"server": {
"hostname": "127.0.0.1",
"port": "3042"
},
"db": {
"connectionString": "sqlite://./db.sqlite",
"graphiql": true,
"openapi": true,
"graphql": true
}
}
+ + + + \ No newline at end of file diff --git a/docs/reference/db/introduction/index.html b/docs/reference/db/introduction/index.html new file mode 100644 index 00000000000..3057e0e7e91 --- /dev/null +++ b/docs/reference/db/introduction/index.html @@ -0,0 +1,25 @@ + + + + + +Platformatic DB | Platformatic Open Source Software + + + + + +
+
Version: 0.42.1

Platformatic DB

Platformatic DB is an HTTP server that provides a flexible set of tools for +building robust APIs with Node.js.

For a high level overview of how Platformatic DB works, please reference the +Architecture guide.

info

Platformatic DB is currently in public beta.

Features

info

Get up and running in 2 minutes using our +Quick Start Guide

Supported databases

DatabaseVersion
SQLite3.
PostgreSQL>= 15
MySQL>= 5.7
MariaDB>= 10.11

The required database driver is automatically inferred and loaded based on the +value of the connectionString +configuration setting.

Public beta

Platformatic DB is in public beta. You can use it in production, but it's quite +likely that you'll encounter significant bugs.

If you run into a bug or have a suggestion for improvement, please +raise an issue on GitHub.

+ + + + \ No newline at end of file diff --git a/docs/reference/db/logging/index.html b/docs/reference/db/logging/index.html new file mode 100644 index 00000000000..f0e56ee0353 --- /dev/null +++ b/docs/reference/db/logging/index.html @@ -0,0 +1,25 @@ + + + + + +Logging | Platformatic Open Source Software + + + + + +
+
Version: 0.42.1

Logging

Platformatic DB uses a low overhead logger named Pino +to output structured log messages.

Logger output level

By default the logger output level is set to info, meaning that all log messages +with a level of info or above will be output by the logger. See the +Pino documentation +for details on the supported log levels.

The logger output level can be overridden by adding a logger object to the server +configuration settings group:

platformatic.db.json
{
"server": {
"logger": {
"level": "error"
},
...
},
...
}

Log formatting

If you run Platformatic DB in a terminal, where standard out (stdout) +is a TTY:

  • pino-pretty is automatically used +to pretty print the logs and make them easier to read during development.
  • The Platformatic logo is printed (if colors are supported in the terminal emulator)

Example:

$ npx platformatic db start




/////////////
///// /////
/// ///
/// ///
/// ///
&& /// /// &&
&&&&&& /// /// &&&&&&
&&&& /// /// &&&&
&&& /// /// &&&&&&&&&&&&
&&& /// /////// //// && &&&&&
&& /// /////////////// &&&
&&& /// /// &&&
&&& /// // &&
&&& /// &&
&&& /// &&&
&&&& /// &&&
&&&&& /// &&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&
///
///
///
///
///
///


[11:20:33.466] INFO (337606): server listening
url: "http://127.0.0.1:3042"

If stdout is redirected to a non-TTY, the logo is not printed and the logs are +formatted as newline-delimited JSON:

$ npx platformatic db start | head
{"level":30,"time":1665566628973,"pid":338365,"hostname":"darkav2","url":"http://127.0.0.1:3042","msg":"server listening"}
+ + + + \ No newline at end of file diff --git a/docs/reference/db/migrations/index.html b/docs/reference/db/migrations/index.html new file mode 100644 index 00000000000..fb92fad61ae --- /dev/null +++ b/docs/reference/db/migrations/index.html @@ -0,0 +1,17 @@ + + + + + +Migrations | Platformatic Open Source Software + + + + + +
+
Version: 0.42.1

Migrations

Platformatic DB uses Postgrator under the hood to run migrations. Please refer to the Postgrator documentation for guidance on writing migration files.

In brief, you should create a file structure like this

migrations/
|- 001.do.sql
|- 001.undo.sql
|- 002.do.sql
|- 002.undo.sql
|- 003.do.sql
|- 003.undo.sql
|- 004.do.sql
|- 004.undo.sql
|- ... and so on

Postgrator uses a table in your schema, to store which migrations have been already processed, so that only new ones will be applied at every server start.

You can always rollback some migrations by specifying what version you would like to rollback to.

Example

$ platformatic db migrations apply --to 002

Will execute 004.undo.sql, 003.undo.sql in this order. If you keep those files in migrations directory, when the server restarts it will execute 003.do.sql and 004.do.sql in this order if the autoApply value is true, or you can run the db migrations apply command.

It's also possible to rollback a single migration with -r:

$ platformatic db migrations apply -r 

How to run migrations

There are two ways to run migrations in Platformatic DB. They can be processed automatically when the server starts if the autoApply value is true, or you can just run the db migrations apply command.

In both cases you have to edit your config file to tell Platformatic DB where your migration files are located.

Automatically on server start

To run migrations when Platformatic DB starts, you need to use the config file root property migrations.

There are two options in the "migrations" property

  • dir (required) the directory where the migration files are located. It will be relative to the config file path.
  • autoApply a boolean value that tells Platformatic DB to auto-apply migrations or not (default: false)

Example

{
...
"migrations": {
"dir": "./path/to/migrations/folder",
"autoApply": false
}
}

Manually with the CLI

See documentation about db migrations apply command

In short:

  • be sure to define a correct migrations.dir folder under the config on platformatic.db.json
  • get the MIGRATION_NUMBER (e.g. if the file is named 002.do.sql it will be 002)
  • run npx platformatic db migrations apply --to MIGRATION_NUMBER
+ + + + \ No newline at end of file diff --git a/docs/reference/db/plugin/index.html b/docs/reference/db/plugin/index.html new file mode 100644 index 00000000000..4b1d73e94d1 --- /dev/null +++ b/docs/reference/db/plugin/index.html @@ -0,0 +1,19 @@ + + + + + +Plugin | Platformatic Open Source Software + + + + + +
+
Version: 0.42.1

Plugin

If you want to extend Platformatic DB features, it is possible to register a plugin, which will be in the form of a standard Fastify plugin.

The config file will specify where the plugin file is located as the example below:

{
...
"plugins": {
"paths": ["./plugin/index.js"]
}
}

The paths are relative to the config file path.

Once the config file is set up, you can write your plugin to extend Platformatic DB API or write your custom business logic.

You should export an async function which receives the following parameters

  • app (FastifyInstance) that is the main fastify instance running Platformatic DB
  • opts all the options specified in the config file after path
  • You can always access Platformatic data mapper through app.platformatic property.
info

To make sure that a user has the appropriate set of permissions to perform any action on an entity the context should be passed to the entity mapper operation like this:

app.post('/', async (req, reply) => {
const ctx = req.createPlatformaticCtx()

await app.platformatic.entities.movies.find({
where: { /*...*/ },
ctx
})
})

Check some examples.

Hot Reload

Plugin file is being watched by fs.watch function.

You don't need to reload Platformatic DB server while working on your plugin. Every time you save, the watcher will trigger a reload event and the server will auto-restart and load your updated code.

tip

At this time, on Linux, file watch in subdirectories is not supported due to a Node.js limitation (documented here).

Directories

The path can also be a directory. In that case, the directory will be loaded with @fastify/autoload.

Consider the following directory structure:

├── routes
│ ├── foo
│ │ ├── something.js
│ │ └── bar
│ │ └── baz.js
│ ├── single-plugin
│ │ └── utils.js
│ └── another-plugin.js
└── platformatic.service.json

By default the folder will be added as a prefix to all the routes defined within them. +See the autoload documentation for all the options to customize this behavior.

Multiple plugins

Multiple plugins can be loaded in parallel by specifying an array:

{
...
"plugins": {
"paths": [{
"path": "./plugin/index.js"
}, {
"path": "./routes/"
}]
}
}

fastify.swagger()

TypeScript and autocompletion

If you want to access any of the types provided by Platformatic DB, generate them using the platformatic db types command. +This will create a global.d.ts file that you can now import everywhere, like so:

/// <reference types="./global.d.ts" />

Remember to adjust the path to global.d.ts.

Plugin definition with TypeScript

Here is an example of writing a plugin in TypeScript:

/// <reference types="./global.d.ts" />
import { FastifyInstance, FastifyPluginOptions } from 'fastify'

export default async function (fastify: FastifyInstance, opts: FastifyPluginOptions) {
}

Note that you need to add the "plugins": { "typescript": true } configuration to your platformatic.service.json.

+ + + + \ No newline at end of file diff --git a/docs/reference/db/programmatic/index.html b/docs/reference/db/programmatic/index.html new file mode 100644 index 00000000000..0f26825aa68 --- /dev/null +++ b/docs/reference/db/programmatic/index.html @@ -0,0 +1,17 @@ + + + + + +Programmatic API | Platformatic Open Source Software + + + + + +
+
Version: 0.42.1

Programmatic API

It's possible to start an instance of Platformatic DB from JavaScript.

import { buildServer } from '@platformatic/db'

const app = await buildServer('/path/to/platformatic.db.json')

await app.start() // this will start our server

console.log('URL', app.url)

const res = await fetch(app.url)
console.log(await res.json())

// do something

await app.close()

It is also possible to customize the configuration:

import { buildServer } from '@platformatic/db'

const app = await buildServer({
server: {
hostname: '127.0.0.1',
port: 0
},
db: {
connectionString: 'sqlite://test.sqlite'
},
})

await app.start() // this will start our server

console.log('URL', app.url)

const res = await fetch(app.url)
console.log(await res.json())

// do something

await app.close()

For more details on how this is implemented, read Platformatic Service Programmatic API.

API

buildServer(config)

Returns an instance of the restartable application

RestartableApp

.start()

Listen to the hostname/port combination specified in the config.

.restart()

Restart the Fastify application

.close()

Stops the application.

+ + + + \ No newline at end of file diff --git a/docs/reference/db/schema-support/index.html b/docs/reference/db/schema-support/index.html new file mode 100644 index 00000000000..ffa8ee07885 --- /dev/null +++ b/docs/reference/db/schema-support/index.html @@ -0,0 +1,21 @@ + + + + + +Schema support | Platformatic Open Source Software + + + + + +
+
Version: 0.42.1

Schema support

It's possible to specify the schemas where the tables are located (if the database supports schemas). +Platformatic DB will inspect these schemas to create the entities

Example

CREATE SCHEMA IF NOT EXISTS "test1";
CREATE TABLE IF NOT EXISTS test1.movies (
id INTEGER PRIMARY KEY,
title TEXT NOT NULL
);

CREATE SCHEMA IF NOT EXISTS "test2";
CREATE TABLE IF NOT EXISTS test2.users (
id INTEGER PRIMARY KEY,
title TEXT NOT NULL
);

The schemas must be specified in configuration in the schema section. +Note that if we use schemas and migrations, we must specify the schema in the migrations table as well +(with postgresql, we assume we use the default public schema).

  ...
"db": {
"connectionString": "(...)",
"schema": [
"test1", "test2"
],
"ignore": {
"versions": true
}
},
"migrations": {
"dir": "migrations",
"table": "test1.versions"
},

...

The entity names are then generated in the form schemaName + entityName, PascalCase (this is necessary to avoid name collisions in case there are tables with the same name in different schemas). +So for instance for the example above we generate the Test1Movie and Test2User entities.

info

Please pay attention to the entity names when using schema, these are also used to setup authorization rules

+ + + + \ No newline at end of file diff --git a/docs/reference/runtime/configuration/index.html b/docs/reference/runtime/configuration/index.html new file mode 100644 index 00000000000..18942736358 --- /dev/null +++ b/docs/reference/runtime/configuration/index.html @@ -0,0 +1,67 @@ + + + + + +Configuration | Platformatic Open Source Software + + + + + +
+
Version: 0.42.1

Configuration

Platformatic Runtime is configured with a configuration file. It supports the +use of environment variables as setting values with configuration placeholders.

Configuration file

If the Platformatic CLI finds a file in the current working directory matching +one of these filenames, it will automatically load it:

  • platformatic.runtime.json
  • platformatic.runtime.json5
  • platformatic.runtime.yml or platformatic.runtime.yaml
  • platformatic.runtime.tml or platformatic.runtime.toml

Alternatively, a --config option with a configuration +filepath can be passed to most platformatic runtime CLI commands.

The configuration examples in this reference use JSON.

Supported formats

FormatExtensions
JSON.json
JSON5.json5
YAML.yml, .yaml
TOML.tml

Comments are supported by the JSON5, YAML and TOML file formats.

Settings

Configuration settings are organized into the following groups:

Configuration settings containing sensitive data should be set using +configuration placeholders.

The autoload and services settings can be used together, but at least one +of them must be provided. When the configuration file is parsed, autoload +configuration is translated into services configuration.

autoload

The autoload configuration is intended to be used with monorepo applications. +autoload is an object with the following settings:

  • path (required, string) - The path to a directory containing the +microservices to load. In a traditional monorepo application, this directory is +typically named packages.
  • exclude (array of strings) - Child directories inside of path that +should not be processed.
  • mappings (object) - Each microservice is given an ID and is expected +to have a Platformatic configuration file. By default the ID is the +microservice's directory name, and the configuration file is expected to be a +well-known Platformatic configuration file. mappings can be used to override +these default values.
    • id (required, string) - The overridden ID. This becomes the new +microservice ID.
    • config (required, string) - The overridden configuration file +name. This is the file that will be used when starting the microservice.

services

services is an array of objects that defines the microservices managed by the +runtime. Each service object supports the following settings:

  • id (required, string) - A unique identifier for the microservice. +When working with the Platformatic Composer, this value corresponds to the id +property of each object in the services section of the config file. When +working with client objects, this corresponds to the optional serviceId +property or the name field in the client's package.json file if a +serviceId is not explicitly provided.
  • path (required, string) - The path to the directory containing +the microservice.
  • config (required, string) - The configuration file used to start +the microservice.

entrypoint

The Platformatic Runtime's entrypoint is a microservice that is exposed +publicly. This value must be the ID of a service defined via the autoload or +services configuration.

hotReload

An optional boolean, defaulting to false, indicating if hot reloading should +be enabled for the runtime. If this value is set to false, it will disable +hot reloading for any microservices managed by the runtime. If this value is +true, hot reloading for individual microservices is managed by the +configuration of that microservice.

danger

While hot reloading is useful for development, it is not recommended for use in +production.

allowCycles

An optional boolean, defaulting to false, indicating if dependency cycles +are allowed between microservices managed by the runtime. When the Platformatic +Runtime parses the provided configuration, it examines the clients of each +microservice, as well as the services of Platformatic Composer applications to +build a dependency graph. A topological sort is performed on this dependency +graph so that each service is started after all of its dependencies have been +started. If there are cycles, the topological sort fails and the Runtime does +not start any applications.

If allowCycles is true, the topological sort is skipped, and the +microservices are started in the order specified in the configuration file.

telemetry

Open Telemetry is optionally supported with these settings:

  • serviceName (required, string) — Name of the service as will be reported in open telemetry. In the runtime case, the name of the services as reported in traces is ${serviceName}-${serviceId}, where serviceId is the id of the service in the runtime.
  • version (string) — Optional version (free form)
  • skip (array). Optional list of operations to skip when exporting telemetry defined object with properties:
    • method: GET, POST, PUT, DELETE, PATCH, HEAD, OPTIONS, TRACE
    • path. e.g.: /documentation/json
  • exporter (object or array) — Exporter configuration. If not defined, the exporter defaults to console. If an array of objects is configured, every object must be a valid exporter object. The exporter object has the following properties:
    • type (string) — Exporter type. Supported values are console, otlp, zipkin and memory (default: console). memory is only supported for testing purposes.
    • options (object) — These options are supported:
      • url (string) — The URL to send the telemetry to. Required for otlp exporter. This has no effect on console and memory exporters.
      • headers (object) — Optional headers to send with the telemetry. This has no effect on console and memory exporters.

Note that OTLP traces can be consumed by different solutions, like Jaeger. Here the full list.

Example

{
"telemetry": {
"serviceName": "test-service",
"exporter": {
"type": "otlp",
"options": {
"url": "http://localhost:4318/v1/traces"
}
}
}
}

Environment variable placeholders

The value for any configuration setting can be replaced with an environment +variable by adding a placeholder in the configuration file, for example +{PLT_ENTRYPOINT}.

All placeholders in a configuration must be available as an environment +variable and must meet the +allowed placeholder name rules.

Setting environment variables

If a .env file exists it will automatically be loaded by Platformatic using +dotenv. For example:

.env
PLT_ENTRYPOINT=service

The .env file must be located in the same folder as the Platformatic +configuration file or in the current working directory.

Environment variables can also be set directly on the command line, for example:

PLT_ENTRYPOINT=service npx platformatic runtime

Allowed placeholder names

Only placeholder names prefixed with PLT_, or that are in this allow list, +will be dynamically replaced in the configuration file:

  • PORT
  • DATABASE_URL

This restriction is to avoid accidentally exposing system environment variables. +An error will be raised by Platformatic if it finds a configuration placeholder +that isn't allowed.

The default allow list can be extended by passing a --allow-env CLI option +with a comma separated list of strings, for example:

npx platformatic runtime --allow-env=HOST,SERVER_LOGGER_LEVEL

If --allow-env is passed as an option to the CLI, it will be merged with the +default allow list.

+ + + + \ No newline at end of file diff --git a/docs/reference/runtime/introduction/index.html b/docs/reference/runtime/introduction/index.html new file mode 100644 index 00000000000..0566d36eae8 --- /dev/null +++ b/docs/reference/runtime/introduction/index.html @@ -0,0 +1,33 @@ + + + + + +Platformatic Runtime | Platformatic Open Source Software + + + + + +
+
Version: 0.42.1

Platformatic Runtime

Platformatic Runtime is an environment for running multiple Platformatic +microservices as a single monolithic deployment unit.

info

Platformatic Runtime is currently in public beta.

Features

Public beta

Platformatic Runtime is in public beta. You can use it in production, but it's quite +likely that you'll encounter significant bugs.

If you run into a bug or have a suggestion for improvement, please +raise an issue on GitHub.

Standalone usage

If you're only interested in the features available in Platformatic Runtime, you can replace platformatic with @platformatic/runtime in the dependencies of your package.json, so that you'll import fewer deps.

Example configuration file

The following configuration file can be used to start a new Platformatic +Runtime project. For more details on the configuration file, see the +configuration documentation.

{
"$schema": "https://platformatic.dev/schemas/v0.26.0/runtime",
"autoload": {
"path": "./packages",
"exclude": ["docs"]
},
"entrypoint": "entrypointApp"
}

TypeScript Compilation

Platformatic Runtime streamlines the compilation of all services built on TypeScript with the command +plt runtime compile. The TypeScript compiler (tsc) is required to be installed separately.

Interservice communication

The Platformatic Runtime allows multiple microservice applications to run +within a single process. Only the entrypoint binds to an operating system +port and can be reached from outside of the runtime.

Within the runtime, all interservice communication happens by injecting HTTP +requests into the running servers, without binding them to ports. This injection +is handled by +fastify-undici-dispatcher.

Each microservice is assigned an internal domain name based on its unique ID. +For example, a microservice with the ID awesome is given the internal domain +of http://awesome.plt.local. The fastify-undici-dispatcher module maps that +domain to the Fastify server running the awesome microservice. Any Node.js +APIs based on Undici, such as fetch(), will then automatically route requests +addressed to awesome.plt.local to the corresponding Fastify server.

+ + + + \ No newline at end of file diff --git a/docs/reference/runtime/programmatic/index.html b/docs/reference/runtime/programmatic/index.html new file mode 100644 index 00000000000..096a499b08c --- /dev/null +++ b/docs/reference/runtime/programmatic/index.html @@ -0,0 +1,28 @@ + + + + + +Programmatic API | Platformatic Open Source Software + + + + + +
+
Version: 0.42.1

Programmatic API

In many cases it's useful to start Platformatic applications using an API +instead of the command line. The @platformatic/runtime API makes it simple to +work with different application types (e.g. service, db, composer and runtime) without +needing to know the application type a priori.

buildServer()

The buildServer function creates a server from a provided configuration +object or configuration filename. +The config can be of either Platformatic Service, Platformatic DB, +Platformatic Composer or any other application built on top of +Platformatic Service.

import { buildServer } from '@platformatic/runtime'

const app = await buildServer('path/to/platformatic.runtime.json')
const entrypointUrl = await app.start()

// Make a request to the entrypoint.
const res = await fetch(entrypointUrl)
console.log(await res.json())

// Do other interesting things.

await app.close()

It is also possible to customize the configuration:

import { buildServer } from '@platformatic/runtime'

const config = {
// $schema: 'https://platformatic.dev/schemas/v0.39.0/runtime',
// $schema: 'https://platformatic.dev/schemas/v0.39.0/service',
// $schema: 'https://platformatic.dev/schemas/v0.39.0/db',
// $schema: 'https://platformatic.dev/schemas/v0.39.0/composer'
...
}
const app = await buildServer(config)

await app.start()

loadConfig()

The loadConfig function is used to read and parse a configuration file for +an arbitrary Platformatic application.

import { loadConfig } from '@platformatic/runtime'

// Read the config based on command line arguments. loadConfig() will detect
// the application type.
const config = await loadConfig({}, ['-c', '/path/to/platformatic.config.json'])

// Read the config based on command line arguments. The application type can
// be provided explicitly.
const config = await loadConfig(
{},
['-c', '/path/to/platformatic.config.json']
)

// Default config can be specified.
const config = await loadConfig(
{},
['-c', '/path/to/platformatic.config.json'],
{ key: 'value' }
)

start()

The start function loads a configuration, builds a server, and starts the +server. However, the server is not returned.

import { start } from '@platformatic/runtime'

await start(['-c', '/path/to/platformatic.config.json'])

startCommand()

The startCommand function is similar to start. However, if an exception +occurs, startCommand logs the error and exits the process. This is different +from start, which throws the exception.

import { startCommand } from '@platformatic/runtime'

await startCommand(['-c', '/path/to/platformatic.config.json'])
+ + + + \ No newline at end of file diff --git a/docs/reference/service/configuration/index.html b/docs/reference/service/configuration/index.html new file mode 100644 index 00000000000..78ec333bbbe --- /dev/null +++ b/docs/reference/service/configuration/index.html @@ -0,0 +1,38 @@ + + + + + +Configuration | Platformatic Open Source Software + + + + + +
+
Version: 0.42.1

Configuration

Platformatic Service is configured with a configuration file. It supports the use +of environment variables as setting values with configuration placeholders.

Configuration file

If the Platformatic CLI finds a file in the current working directory matching +one of these filenames, it will automatically load it:

  • platformatic.service.json
  • platformatic.service.json5
  • platformatic.service.yml or platformatic.service.yaml
  • platformatic.service.tml or platformatic.service.toml

Alternatively, a --config option with a configuration +filepath can be passed to most platformatic service CLI commands.

The configuration examples in this reference use JSON.

Supported formats

FormatExtensions
JSON.json
JSON5.json5
YAML.yml, .yaml
TOML.tml, .toml

Comments are supported by the JSON5, YAML and TOML file formats.

Settings

Configuration settings are organised into the following groups:

Sensitive configuration settings, such as a database connection URL that contains +a password, should be set using configuration placeholders.

server

A required object with the following settings:

  • hostname (required, string) — Hostname where Platformatic Service server will listen for connections.

  • port (required, number) — Port where Platformatic Service server will listen for connections.

  • healthCheck (boolean or object) — Enables the health check endpoint.

    • Powered by @fastify/under-pressure.
    • The value can be an object, used to specify the interval between checks in milliseconds (default: 5000)

    Example

    {
    "server": {
    ...
    "healthCheck": {
    "interval": 2000
    }
    }
    }
  • cors (object) — Configuration for Cross-Origin Resource Sharing (CORS) headers.

    • All options will be passed to the @fastify/cors plugin. In order to specify a RegExp object, you can pass { regexp: 'yourregexp' }, +it will be automatically converted.
  • logger (object) -- the logger configuration.

  • pluginTimeout (integer) -- the number of milliseconds to wait for a Fastify plugin to load, see the fastify docs for more details.

  • https (object) - Configuration for HTTPS supporting the following options.

    • key (required, string, object, or array) - If key is a string, it specifies the private key to be used. If key is an object, it must have a path property specifying the private key file. Multiple keys are supported by passing an array of keys.
    • cert (required, string, object, or array) - If cert is a string, it specifies the certificate to be used. If cert is an object, it must have a path property specifying the certificate file. Multiple certificates are supported by passing an array of keys.

metrics

Configuration for a Prometheus server that will export monitoring metrics +for the current server instance. It uses fastify-metrics +under the hood.

This setting can be a boolean or an object. If set to true the Prometheus server will listen on http://0.0.0.0:9090.

Supported object properties:

  • hostname (string) — The hostname where Prometheus server will listen for connections.
  • port (number) — The port where Prometheus server will listen for connections.
  • auth (object) — Basic Auth configuration. username and password are required here +(use environment variables).

plugins

An optional object that defines the plugins loaded by Platformatic Service.

  • paths (required, array): an array of paths (string) +or an array of objects composed as follows,
    • path (string): Relative path to plugin's entry point.
    • options (object): Optional plugin options.
    • encapsulate (boolean): if the path is a folder, it instructs Platformatic to not encapsulate those plugins.
    • maxDepth (integer): if the path is a folder, it limits the depth to load the content from.
  • typescript (boolean or object): enable TypeScript compilation. A tsconfig.json file is required in the same folder.

Example

{
"plugins": {
"paths": [{
"path": "./my-plugin.js",
"options": {
"foo": "bar"
}
}]
}
}

typescript compilation options

The typescript can also be an object to customize the compilation. Here are the supported options:

  • enabled (boolean): enables compilation
  • tsConfig (string): path to the tsconfig.json file relative to the configuration
  • outDir (string): the output directory of tsconfig.json, in case tsconfig.json is not available +and enabled is set to false (production build)
  • flags (array of string): flags to be passed to tsc. Overrides tsConfig. +

Example:

{
"plugins": {
"paths": [{
"path": "./my-plugin.js",
"options": {
"foo": "bar"
}
}],
"typescript": {
"enabled": false,
"tsConfig": "./path/to/tsconfig.json",
"outDir": "dist"
}
}
}

watch

Disable watching for file changes if set to false. It can also be customized with the following options:

  • ignore (string[], default: null): List of glob patterns to ignore when watching for changes. If null or not specified, ignore rule is not applied. Ignore option doesn't work for typescript files.

  • allow (string[], default: ['*.js', '**/*.js']): List of glob patterns to allow when watching for changes. If null or not specified, allow rule is not applied. Allow option doesn't work for typescript files.

    Example

    {
    "watch": {
    "ignore": ["*.mjs", "**/*.mjs"],
    "allow": ["my-plugin.js", "plugins/*.js"]
    }
    }

service

Configure @platformatic/service specific settings such as graphql or openapi:

  • graphql (boolean or object, default: false) — Controls the GraphQL API interface, with optional GraphiQL UI.

    Examples

    Enables GraphQL support

    {
    "service": {
    "graphql": true
    }
    }

    Enables GraphQL support with GraphiQL

    {
    "service": {
    "graphql": {
    "graphiql": true
    }
    }
    }
  • openapi (boolean or object, default: false) — Enables OpenAPI REST support.

    • If value is an object, all OpenAPI v3 allowed properties can be passed. Also a prefix property can be passed to set the OpenAPI prefix.
    • Platformatic Service uses @fastify/swagger under the hood to manage this configuration.

    Examples

    Enables OpenAPI

    {
    "service": {
    ...
    "openapi": true
    }
    }

    Enables OpenAPI with prefix

    {
    "service": {
    "openapi": {
    "prefix": "/api"
    }
    }
    }

    Enables OpenAPI with options

    {
    "service": {
    "openapi": {
    "info": {
    "title": "Platformatic Service",
    "description": "Exposing a SQL database as REST"
    }
    }
    }
    }

telemetry

Open Telemetry is optionally supported with these settings:

  • serviceName (required, string) — Name of the service as will be reported in open telemetry.
  • version (string) — Optional version (free form)
  • skip (array). Optional list of operations to skip when exporting telemetry. Each operation is defined by an object with properties:
    • method: GET, POST, PUT, DELETE, PATCH, HEAD, OPTIONS, TRACE
    • path. e.g.: /documentation/json
  • exporter (object or array) — Exporter configuration. If not defined, the exporter defaults to console. If an array of objects is configured, every object must be a valid exporter object. The exporter object has the following properties:
    • type (string) — Exporter type. Supported values are console, otlp, zipkin and memory (default: console). memory is only supported for testing purposes.
    • options (object) — These options are supported:
      • url (string) — The URL to send the telemetry to. Required for otlp exporter. This has no effect on console and memory exporters.
      • headers (object) — Optional headers to send with the telemetry. This has no effect on console and memory exporters.

Note that OTLP traces can be consumed by different solutions, like Jaeger. Here the full list.

Example

{
"telemetry": {
"serviceName": "test-service",
"exporter": {
"type": "otlp",
"options": {
"url": "http://localhost:4318/v1/traces"
}
}
}
}

Environment variable placeholders

The value for any configuration setting can be replaced with an environment variable +by adding a placeholder in the configuration file, for example {PLT_SERVER_LOGGER_LEVEL}.

All placeholders in a configuration must be available as an environment variable +and must meet the allowed placeholder name rules.

Example

platformatic.service.json
{
"server": {
"port": "{PORT}"
}
}

Platformatic will replace the placeholders in this example with the environment +variables of the same name.

Setting environment variables

If a .env file exists it will automatically be loaded by Platformatic using +dotenv. For example:

.env
PLT_SERVER_LOGGER_LEVEL=info
PORT=8080

The .env file must be located in the same folder as the Platformatic configuration +file or in the current working directory.

Environment variables can also be set directly on the command line, for example:

PLT_SERVER_LOGGER_LEVEL=debug npx platformatic service

Allowed placeholder names

Only placeholder names prefixed with PLT_, or that are in this allow list, will be +dynamically replaced in the configuration file:

  • PORT
  • DATABASE_URL

This restriction is to avoid accidentally exposing system environment variables. +An error will be raised by Platformatic if it finds a configuration placeholder +that isn't allowed.

The default allow list can be extended by passing a --allow-env CLI option with a +comma separated list of strings, for example:

npx platformatic service --allow-env=HOST,SERVER_LOGGER_LEVEL

If --allow-env is passed as an option to the CLI, it will be merged with the +default allow list.

+ + + + \ No newline at end of file diff --git a/docs/reference/service/introduction/index.html b/docs/reference/service/introduction/index.html new file mode 100644 index 00000000000..5f168c83d61 --- /dev/null +++ b/docs/reference/service/introduction/index.html @@ -0,0 +1,21 @@ + + + + + +Platformatic Service | Platformatic Open Source Software + + + + + +
+
Version: 0.42.1

Platformatic Service

Platformatic Service is an HTTP server that provides developer tools for +building robust APIs with Node.js.

For a high level overview of how Platformatic DB works, please reference the +Architecture guide.

info

Platformatic Service is currently in public beta.

Features

Public beta

Platformatic Service is in public beta. You can use it in production, but it's quite +likely that you'll encounter significant bugs.

If you run into a bug or have a suggestion for improvement, please +raise an issue on GitHub.

Standalone usage

If you're only interested in the features available in Platformatic Service, you can simply switch platformatic with @platformatic/service in the dependencies of your package.json, so that you'll import fewer dependencies.

You can use the plt-service command, it's the equivalent of plt service.

+ + + + \ No newline at end of file diff --git a/docs/reference/service/plugin/index.html b/docs/reference/service/plugin/index.html new file mode 100644 index 00000000000..0900cb506d7 --- /dev/null +++ b/docs/reference/service/plugin/index.html @@ -0,0 +1,21 @@ + + + + + +Plugin | Platformatic Open Source Software + + + + + +
+
Version: 0.42.1

Plugin

If you want to add features to a service, you will need to register a plugin, which will be in the form of a standard Fastify plugin.

The config file will specify where the plugin file is located as the example below:

{
...
"plugins": {
"paths": ["./plugin/index.js"]
}
}

The path is relative to the config file path.

You should export an async function which receives the following parameters

  • app (FastifyInstance) that is the main fastify instance
  • opts all the options specified in the config file after path

Hot Reload

The plugin file is watched by the fs.watch function.

You don't need to reload Platformatic Service server while working on your plugin. Every time you save, the watcher will trigger a reload event and the server will auto-restart and load your updated code.

tip

At this time, on Linux, file watch in subdirectories is not supported due to a Node.js limitation (documented here).

Directories

The path can also be a directory. In that case, the directory will be loaded with @fastify/autoload.

Consider the following directory structure:

├── routes
│ ├── foo
│ │ ├── something.js
│ │ └── bar
│ │ └── baz.js
│ ├── single-plugin
│ │ └── utils.js
│ └── another-plugin.js
└── platformatic.service.json

By default the folder will be added as a prefix to all the routes defined within them. +See the autoload documentation for all the options to customize this behavior.

Multiple plugins

Multiple plugins can be loaded in parallel by specifying an array:

{
...
"plugins": {
"paths": [{
"path": "./plugin/index.js"
}, {
"path": "./routes/"
}]
}
}

TypeScript and Autocompletion

In order to provide the correct typings of the features added by Platformatic Service to your Fastify instance, +add the following at the top of your files:

/// <reference types="@platformatic/service" />

Plugin definition with TypeScript

Here is an example of writing a plugin in TypeScript:

/// <reference types="@platformatic/service" />
import { FastifyInstance, FastifyPluginOptions } from 'fastify'

export default async function (fastify: FastifyInstance, opts: FastifyPluginOptions) {
}

Note that you need to add the "typescript": true configuration to your platformatic.service.json.

Loading compiled files

Setting "typescript": false but including a tsconfig.json with an outDir +option, will instruct Platformatic Service to try loading your plugins from that folder instead. +This setup is needed to support pre-compiled sources to reduce cold start time during deployment.

+ + + + \ No newline at end of file diff --git a/docs/reference/service/programmatic/index.html b/docs/reference/service/programmatic/index.html new file mode 100644 index 00000000000..8441da51388 --- /dev/null +++ b/docs/reference/service/programmatic/index.html @@ -0,0 +1,19 @@ + + + + + +Programmatic API | Platformatic Open Source Software + + + + + +
+
Version: 0.42.1

Programmatic API

In many cases it's useful to start Platformatic Service using an API instead of +command line, e.g. in tests we want to start and stop our server.

The buildServer function allows that:

import { buildServer } from '@platformatic/service'

const app = await buildServer('path/to/platformatic.service.json')

await app.start()

const res = await fetch(app.url)
console.log(await res.json())

// do something

await app.close()

It is also possible to customize the configuration:

import { buildServer } from '@platformatic/service'

const app = await buildServer({
server: {
hostname: '127.0.0.1',
port: 0
}
})

await app.start()

const res = await fetch(app.url)
console.log(await res.json())

// do something

await app.close()

Creating a reusable application on top of Platformatic Service

Platformatic DB is built on top of Platformatic Service. +If you want to build a similar kind of tool, follow this example:

import { buildServer, schema } from '@platformatic/service'

async function myPlugin (app, opts) {
// app.platformatic.configManager contains an instance of the ConfigManager
console.log(app.platformatic.configManager.current)

await platformaticService(app, opts)
}

// break Fastify encapsulation
myPlugin[Symbol.for('skip-override')] = true
myPlugin.configType = 'myPlugin'

// This is the schema for this reusable application configuration file,
// customize at will but retain the base properties of the schema from
// @platformatic/service
myPlugin.schema = schema

// The configuration of the ConfigManager
myPlugin.configManagerConfig = {
schema: myPlugin.schema,
envWhitelist: ['PORT', 'HOSTNAME'],
allowToWatch: ['.env'],
schemaOptions: {
useDefaults: true,
coerceTypes: true,
allErrors: true,
strict: false
},
async transformConfig () {
console.log(this.current) // this is the current config

// In this method you can alter the configuration before the application
// is started. It's useful to apply some defaults that cannot be derived
// inside the schema, such as resolving paths.
}
}


const server = await buildServer('path/to/config.json', myPlugin)

await server.start()

const res = await fetch(server.listeningOrigin)
console.log(await res.json())

// do something

await server.close()
+ + + + \ No newline at end of file diff --git a/docs/reference/sql-events/fastify-plugin/index.html b/docs/reference/sql-events/fastify-plugin/index.html new file mode 100644 index 00000000000..43e328e708a --- /dev/null +++ b/docs/reference/sql-events/fastify-plugin/index.html @@ -0,0 +1,19 @@ + + + + + +Fastify Plugin | Platformatic Open Source Software + + + + + +
+
Version: 0.42.1

Fastify Plugin

The @platformatic/sql-events package exports a Fastify plugin that can be used out of the box in a server application. +It requires that @platformatic/sql-mapper is registered before it.

The plugin has the following options:

The plugin adds the following properties to the app.platformatic object:

  • mq — an instance of mqemitter
  • subscribe(topics) — a method to create a node Readable +that will contain the events emitted by those topics.

Each entity of app.platformatic.entities will be augmented with two functions:

  • entity.getPublishTopic({ ctx, data, action })
  • entity.getSubscriptionTopic({ ctx, action })

Where ctx is the GraphQL Context, data is the object that will be emitted and action is either save or delete.

Usage

'use strict'

const Fastify = require('fastify')
const mapper = require('@platformatic/sql-mapper')
const events = require('@platformatic/sql-events')

async function main() {
const app = Fastify({
logger: {
level: 'info'
}
})
app.register(mapper.plugin, {
connectionString: 'postgres://postgres:postgres@127.0.0.1/postgres'
})

app.register(events)

// setup your routes


await app.listen({ port: 3333 })
}

main()
+ + + + \ No newline at end of file diff --git a/docs/reference/sql-events/introduction/index.html b/docs/reference/sql-events/introduction/index.html new file mode 100644 index 00000000000..64717ac45f4 --- /dev/null +++ b/docs/reference/sql-events/introduction/index.html @@ -0,0 +1,21 @@ + + + + + +Introduction to the sql-events module | Platformatic Open Source Software + + + + + +
+
Version: 0.42.1

Introduction to the sql-events module

The Platformatic DB sql-events uses mqemitter to publish events when entities are saved and deleted.

These events are useful to distribute updates to clients, e.g. via WebSocket, Server-Sent Events, or GraphQL Subscriptions. +When subscribing and using a multi-process system with a broker like Redis, a subscribed topic will receive the data from all +the other processes.

They are not the right choice for executing some code whenever an entity is created, modified or deleted, in that case +use @platformatic/sql-mapper hooks.

Install

You can use it together with @platformatic/sql-mapper.

npm i @platformatic/sql-mapper @platformatic/sql-events

Usage

const { connect } = require('@platformatic/sql-mapper')
const { setupEmitter } = require('@platformatic/sql-events')
const { pino } = require('pino')

const log = pino()

async function onDatabaseLoad (db, sql) {
await db.query(sql`CREATE TABLE pages (
id SERIAL PRIMARY KEY,
title VARCHAR(255) NOT NULL
);`)
}
const connectionString = 'postgres://postgres:postgres@localhost:5432/postgres'
const mapper = await connect({
connectionString,
log,
onDatabaseLoad,
ignore: {},
hooks: {
Page: {
find: async function(_find, opts) {
console.log('hook called');
return await _find(opts)
}
}
}
})

setupEmitter({ mapper, log })

const pageEntity = mapper.entities.page

const queue = await mapper.subscribe([
pageEntity.getSubscriptionTopic({ action: 'save' }),
pageEntity.getSubscriptionTopic({ action: 'delete' })
])

const page = await pageEntity.save({
input: { title: 'fourth page' }
})

const page2 = await pageEntity.save({
input: {
id: page.id,
title: 'fifth page'
}
})

await pageEntity.delete({
where: {
id: {
eq: page.id
}
},
fields: ['id', 'title']
})

for await (const ev of queue) {
console.log(ev)
if (expected.length === 0) {
break
}
}

process.exit(0)

API

The setupEmitter function has the following options:

The setupEmitter functions adds the following properties to the mapper object:

  • mq — an instance of mqemitter
  • subscribe(topics) — a method to create a node Readable +that will contain the events emitted by those topics.

Each entity of app.platformatic.entities will be augmented with two functions:

  • entity.getPublishTopic({ ctx, data, action })
  • entity.getSubscriptionTopic({ ctx, action })

Where ctx is the GraphQL Context, data is the object that will be emitted and action is either save or delete.

+ + + + \ No newline at end of file diff --git a/docs/reference/sql-graphql/ignore/index.html b/docs/reference/sql-graphql/ignore/index.html new file mode 100644 index 00000000000..d36a27a4d41 --- /dev/null +++ b/docs/reference/sql-graphql/ignore/index.html @@ -0,0 +1,17 @@ + + + + + +Ignoring types and fields | Platformatic Open Source Software + + + + + + + + + + \ No newline at end of file diff --git a/docs/reference/sql-graphql/introduction/index.html b/docs/reference/sql-graphql/introduction/index.html new file mode 100644 index 00000000000..62afbaeea21 --- /dev/null +++ b/docs/reference/sql-graphql/introduction/index.html @@ -0,0 +1,21 @@ + + + + + +Introduction to the GraphQL API | Platformatic Open Source Software + + + + + +
+
Version: 0.42.1

Introduction to the GraphQL API

The Platformatic DB GraphQL plugin starts a GraphQL server and makes it available +via a /graphql endpoint. This endpoint is automatically ready to run queries and +mutations against your entities. This functionality is powered by +Mercurius.

GraphiQL

The GraphiQL web UI is integrated into +Platformatic DB. To enable it you can pass an option to the sql-graphql plugin:

app.register(graphqlPlugin, { graphiql: true })

The GraphiQL interface is made available under the /graphiql path.

+ + + + \ No newline at end of file diff --git a/docs/reference/sql-graphql/many-to-many/index.html b/docs/reference/sql-graphql/many-to-many/index.html new file mode 100644 index 00000000000..b4b8a3b4751 --- /dev/null +++ b/docs/reference/sql-graphql/many-to-many/index.html @@ -0,0 +1,20 @@ + + + + + +Many To Many Relationship | Platformatic Open Source Software + + + + + +
+
Version: 0.42.1

Many To Many Relationship

Many-to-Many relationship lets you relate each row in one table to many rows in +another table and vice versa.

Many-to-many relationships are implemented in SQL via a "join table", a table whose primary key +is composed of the identifiers of the two parts of the many-to-many relationship.

Platformatic DB fully supports many-to-many relationships on all supported databases.

Example

Consider the following schema (SQLite):

CREATE TABLE pages (
id INTEGER PRIMARY KEY,
the_title VARCHAR(42)
);

CREATE TABLE users (
id INTEGER PRIMARY KEY,
username VARCHAR(255) NOT NULL
);

CREATE TABLE editors (
page_id INTEGER NOT NULL,
user_id INTEGER NOT NULL,
role VARCHAR(255) NOT NULL,
CONSTRAINT fk_editor_pages FOREIGN KEY (page_id) REFERENCES pages(id),
CONSTRAINT fk_editor_users FOREIGN KEY (user_id) REFERENCES users(id),
PRIMARY KEY (page_id, user_id)
);

The table editors is a "join table" between users and pages. +Given this schema, you could issue queries like:

query {
editors(orderBy: { field: role, direction: DESC }) {
user {
id
username
}
page {
id
theTitle
}
role
}
}

Mutation works exactly the same as before:

mutation {
saveEditor(input: { userId: "1", pageId: "1", role: "captain" }) {
user {
id
username
}
page {
id
theTitle
}
role
}
}
+ + + + \ No newline at end of file diff --git a/docs/reference/sql-graphql/mutations/index.html b/docs/reference/sql-graphql/mutations/index.html new file mode 100644 index 00000000000..2a010ffd414 --- /dev/null +++ b/docs/reference/sql-graphql/mutations/index.html @@ -0,0 +1,20 @@ + + + + + +Mutations | Platformatic Open Source Software + + + + + +
+
Version: 0.42.1

Mutations

When the GraphQL plugin is loaded, some mutations are automatically added to +the GraphQL schema.

save[ENTITY]

Saves a new entity to the database or updates an existing entity. +This actually behaves as an upsert, allowing both behaviours depending on the presence of the primary key field.

Example

'use strict'

const Fastify = require('fastify')
const graphqlPlugin = require('@platformatic/sql-graphql')
const sqlMapper = require('@platformatic/sql-mapper')

async function main() {
const app = Fastify({
logger: {
level: 'info'
}
})
app.register(sqlMapper, {
connectionString: 'postgres://postgres:postgres@127.0.0.1/postgres',
log: logger,
})
app.register(graphqlPlugin, {
graphiql: true
})
const res = await app.inject({
method: 'POST',
url: '/graphql',
body: {
query: `
mutation {
savePage(input: { id: 3 title: "Platformatic is cool!" }) {
id
title
}
}
`
}
})
const result = await res.json()
console.log(result.data) // { savePage: { id: '3', title: 'Platformatic is cool!' } }
await app.close()
}

main()

insert[ENTITY]

Inserts a new entity in the database.

Example

'use strict'

const Fastify = require('fastify')
const graphqlPlugin = require('@platformatic/sql-graphql')
const sqlMapper = require('@platformatic/sql-mapper')

async function main() {
const app = Fastify({
logger: {
level: 'info'
}
})
app.register(sqlMapper, {
connectionString: 'postgres://postgres:postgres@127.0.0.1/postgres',
log: logger,
})
app.register(graphqlPlugin, {
graphiql: true
})
const res = await app.inject({
method: 'POST',
url: '/graphql',
body: {
query: `
mutation {
savePage(input: { title: "Platformatic is cool!" }) {
id
title
}
}
`
}
})
const result = await res.json()
console.log(result.data) // { savePage: { id: '4', title: 'Platformatic is cool!' } }
await app.close()
}

main()

delete[ENTITIES]

Deletes one or more entities from the database, based on the where clause +passed as an input to the mutation.

Example

'use strict'

const Fastify = require('fastify')
const graphqlPlugin = require('@platformatic/sql-graphql')
const sqlMapper = require('@platformatic/sql-mapper')

async function main() {
const app = Fastify({
logger: {
level: 'info'
}
})
app.register(sqlMapper, {
connectionString: 'postgres://postgres:postgres@127.0.0.1/postgres',
log: logger,
})
app.register(graphqlPlugin, {
graphiql: true
})
const res = await app.inject({
method: 'POST',
url: '/graphql',
body: {
query: `
mutation {
deletePages(where: { id: { eq: "3" } }) {
id
title
}
}
`
}
})
const result = await res.json()
console.log(result.data) // { deletePages: [ { id: '3', title: 'Platformatic is cool!' } ] }
await app.close()
}

main()
+ + + + \ No newline at end of file diff --git a/docs/reference/sql-graphql/queries/index.html b/docs/reference/sql-graphql/queries/index.html new file mode 100644 index 00000000000..e63ecb89f0a --- /dev/null +++ b/docs/reference/sql-graphql/queries/index.html @@ -0,0 +1,21 @@ + + + + + +Queries | Platformatic Open Source Software + + + + + +
+
Version: 0.42.1

Queries

A GraphQL query is automatically added to the GraphQL schema for each database +table, along with a complete mapping for all table fields.

Example

'use strict'

const Fastify = require('fastify')
const graphqlPlugin = require('@platformatic/sql-graphql')
const sqlMapper = require('@platformatic/sql-mapper')
async function main() {
const app = Fastify({
logger: {
level: 'info'
}
})
app.register(sqlMapper, {
connectionString: 'postgres://postgres:postgres@127.0.0.1/postgres'
})
app.register(graphqlPlugin, {
graphiql: true
})
const res = await app.inject({
method: 'POST',
url: '/graphql',
body: {
query: `
query{
pages{
id,
title
}
}
`
}
})
const result = await res.json()
console.log(result.data)
await app.close()
}
main()

Advanced Queries

The following additional queries are added to the GraphQL schema for each entity:

get[ENTITY]by[PRIMARY_KEY]

If you have a table pages with the field id as the primary key, you can run +a query called getPageById.

Example

...
const res = await app.inject({
method: 'POST',
url: '/graphql',
body: {
query: `
query{
getPageById(id: 3) {
id,
title
}
}
`
}
})
const result = await res.json()
console.log(result.data) // { getPageById: { id: '3', title: 'A fiction' } }

count[ENTITIES]

...
const res = await app.inject({
method: 'POST',
url: '/graphql',
body: {
query: `
query {
countPages {
total
}
}
`
}
})
const result = await res.json()
console.log(result.data) // { countPages: { total: 17 } }

Pagination

Platformatic DB supports pagination of results through the input parameters: limit and offset

Example

{
users(limit:5, offset: 10) {
name
}
}

It returns 5 users starting from position 10.

Limit

By default a limit value (10) is applied to each request.

Clients can override this behavior by passing a value. +In this case the server validates the input and an error is returned if it exceeds the max accepted value (100).

Limit's values can be customized through configuration:

{
...
"db": {
...
"limit": {
"default": 50,
"max": 1000
}
}
}

Limit only accepts values >= 0. Otherwise an error is returned.

Offset

By default offset is not applied to the request. +Clients can override this behavior by passing a value.

Offset only accepts values >= 0. Otherwise an error is returned.

+ + + + \ No newline at end of file diff --git a/docs/reference/sql-graphql/subscriptions/index.html b/docs/reference/sql-graphql/subscriptions/index.html new file mode 100644 index 00000000000..5288d800db3 --- /dev/null +++ b/docs/reference/sql-graphql/subscriptions/index.html @@ -0,0 +1,19 @@ + + + + + +Subscription | Platformatic Open Source Software + + + + + +
+
Version: 0.42.1

Subscription

When the GraphQL plugin is loaded, some subscriptions are automatically added to +the GraphQL schema if the @platformatic/sql-events plugin has been previously registered.

It's possible to avoid creating the subscriptions for a given entity by adding the subscriptionIgnore config, +like so: subscriptionIgnore: ['page'].

[ENTITY]Saved

Published whenever an entity is saved, e.g. when the mutation insert[ENTITY] or save[ENTITY] are called.

[ENTITY]Deleted

Published whenever an entity is deleted, e.g. when the mutation delete[ENTITY] is called.

+ + + + \ No newline at end of file diff --git a/docs/reference/sql-mapper/entities/api/index.html b/docs/reference/sql-mapper/entities/api/index.html new file mode 100644 index 00000000000..c5df3e20743 --- /dev/null +++ b/docs/reference/sql-mapper/entities/api/index.html @@ -0,0 +1,18 @@ + + + + + +API | Platformatic Open Source Software + + + + + +
+
Version: 0.42.1

API

A set of operation methods are available on each entity:

Returned fields

The entity operation methods accept a fields option that can specify an array of field names to be returned. If not specified, all fields will be returned.

Where clause

The entity operation methods accept a where option to allow limiting of the database rows that will be affected by the operation.

The where object's key is the field you want to check, the value is a key/value map where the key is an operator (see the table below) and the value is the value you want to run the operator against.

Platformatic operatorSQL operator
eq'='
in'IN'
nin'NOT IN'
neq'<>'
gt'>'
gte'>='
lt'<'
lte'<='
like'LIKE'

Examples

Selects row with id = 1

{
...
"where": {
id: {
eq: 1
}
}
}

Select all rows with id less than 100

{
...
"where": {
id: {
lt: 100
}
}
}

Select all rows with id 1, 3, 5 or 7

{
...
"where": {
id: {
in: [1, 3, 5, 7]
}
}
}

Where clause operations are by default combined with the AND operator. To combine them with the OR operator, use the or key.

Select all rows with id 1 or 3

{
...
"where": {
or: [
{
id: {
eq: 1
}
},
{
id: {
eq: 3
}
}
]
}
}

Select all rows with id 1 or 3 and title like 'foo%'

{
...
"where": {
or: [
{
id: {
eq: 1
}
},
{
id: {
eq: 3
}
}
],
title: {
like: 'foo%'
}
}
}

Reference

find

Retrieve data for an entity from the database.

Options

NameTypeDescription
fieldsArray of stringList of fields to be returned for each object
whereObjectWhere clause 🔗
orderByArray of ObjectObject like { field: 'counter', direction: 'ASC' }
limitNumberLimits the number of returned elements
offsetNumberThe offset to start looking for rows from

Usage

'use strict'

const { connect } = require('@platformatic/sql-mapper')
const { pino } = require('pino')
const pretty = require('pino-pretty')
const logger = pino(pretty())

async function main() {
const pgConnectionString = 'postgres://postgres:postgres@127.0.0.1/postgres'
const mapper = await connect({
connectionString: pgConnectionString,
log: logger,
})
const res = await mapper.entities.page.find({
fields: ['id', 'title',],
where: {
id: {
lt: 10
}
},
})
logger.info(res)
await mapper.db.dispose()
}
main()

count

Same as find, but only count entities.

Options

NameTypeDescription
whereObjectWhere clause 🔗

Usage

'use strict'

const { connect } = require('@platformatic/sql-mapper')
const { pino } = require('pino')
const pretty = require('pino-pretty')
const logger = pino(pretty())

async function main() {
const pgConnectionString = 'postgres://postgres:postgres@127.0.0.1/postgres'
const mapper = await connect({
connectionString: pgConnectionString,
log: logger,
})
const res = await mapper.entities.page.count({
where: {
id: {
lt: 10
}
},
})
logger.info(res)
await mapper.db.dispose()
}
main()

insert

Insert one or more entity rows in the database.

Options

NameTypeDescription
fieldsArray of stringList of fields to be returned for each object
inputsArray of ObjectEach object is a new row

Usage

'use strict'

const { connect } = require('@platformatic/sql-mapper')
const { pino } = require('pino')
const pretty = require('pino-pretty')
const logger = pino(pretty())

async function main() {
const pgConnectionString = 'postgres://postgres:postgres@127.0.0.1/postgres'
const mapper = await connect({
connectionString: pgConnectionString,
log: logger,
})
const res = await mapper.entities.page.insert({
fields: ['id', 'title' ],
inputs: [
{ title: 'Foobar' },
{ title: 'FizzBuzz' }
],
})
logger.info(res)
/**
0: {
"id": "16",
"title": "Foobar"
}
1: {
"id": "17",
"title": "FizzBuzz"
}
*/
await mapper.db.dispose()
}
main()

save

Create a new entity row in the database or update an existing one.

To update an existing entity, the id field (or equivalent primary key) must be included in the input object. +save actually behaves as an upsert, allowing both behaviours depending on the presence of the primary key field.

Options

NameTypeDescription
fieldsArray of stringList of fields to be returned for each object
inputObjectThe single row to create/update

Usage

'use strict'
const { connect } = require('@platformatic/sql-mapper')
const { pino } = require('pino')
const pretty = require('pino-pretty')
const logger = pino(pretty())

async function main() {
const connectionString = 'postgres://postgres:postgres@127.0.0.1/postgres'
const mapper = await connect({
connectionString: connectionString,
log: logger,
})
const res = await mapper.entities.page.save({
fields: ['id', 'title' ],
input: { id: 1, title: 'FizzBuzz' },
})
logger.info(res)
await mapper.db.dispose()
}
main()

delete

Delete one or more entity rows from the database, depending on the where option. Returns the data for all deleted objects.

Options

NameTypeDescription
fieldsArray of stringList of fields to be returned for each object
whereObjectWhere clause 🔗

Usage

'use strict'
const { connect } = require('@platformatic/sql-mapper')
const { pino } = require('pino')
const pretty = require('pino-pretty')
const logger = pino(pretty())

async function main() {
const connectionString = 'postgres://postgres:postgres@127.0.0.1/postgres'
const mapper = await connect({
connectionString: connectionString,
log: logger,
})
const res = await mapper.entities.page.delete({
fields: ['id', 'title',],
where: {
id: {
lt: 4
}
},
})
logger.info(res)
await mapper.db.dispose()
}
main()

updateMany

Update one or more entity rows from the database, depending on the where option. Returns the data for all updated objects.

Options

NameTypeDescription
whereObjectWhere clause 🔗
inputObjectThe new values you want to update
fieldsArray of stringList of fields to be returned for each object

Usage

'use strict'
const { connect } = require('@platformatic/sql-mapper')
const { pino } = require('pino')
const pretty = require('pino-pretty')
const logger = pino(pretty())

async function main() {
const connectionString = 'postgres://postgres:postgres@127.0.0.1/postgres'
const mapper = await connect({
connectionString: connectionString,
log: logger,
})
const res = await mapper.entities.page.updateMany({
fields: ['id', 'title',],
where: {
counter: {
gte: 30
}
},
input: {
title: 'Updated title'
}
})
logger.info(res)
await mapper.db.dispose()
}
main()

+ + + + \ No newline at end of file diff --git a/docs/reference/sql-mapper/entities/example/index.html b/docs/reference/sql-mapper/entities/example/index.html new file mode 100644 index 00000000000..30f0117238e --- /dev/null +++ b/docs/reference/sql-mapper/entities/example/index.html @@ -0,0 +1,17 @@ + + + + + +Example | Platformatic Open Source Software + + + + + +
+
Version: 0.42.1

Example

Given this PostgreSQL SQL schema:

CREATE TABLE "categories" (
"id" int4 NOT NULL DEFAULT nextval('categories_id_seq'::regclass),
"name" varchar(255) NOT NULL,
PRIMARY KEY ("id")
);

CREATE TABLE "pages" (
"id" int4 NOT NULL DEFAULT nextval('pages_id_seq'::regclass),
"title" varchar(255) NOT NULL,
"category_id" int4,
"user_id" int4,
PRIMARY KEY ("id")
);

ALTER TABLE "pages" ADD FOREIGN KEY ("category_id") REFERENCES "categories"("id");

app.platformatic.entities will contain this mapping object:

{
"category": {
"name": "Category",
"singularName": "category",
"pluralName": "categories",
"primaryKey": "id",
"table": "categories",
"fields": {
"id": {
"sqlType": "int4",
"isNullable": false,
"primaryKey": true,
"camelcase": "id"
},
"name": {
"sqlType": "varchar",
"isNullable": false,
"camelcase": "name"
}
},
"camelCasedFields": {
"id": {
"sqlType": "int4",
"isNullable": false,
"primaryKey": true,
"camelcase": "id"
},
"name": {
"sqlType": "varchar",
"isNullable": false,
"camelcase": "name"
}
},
"relations": [],
"reverseRelationships": [
{
"sourceEntity": "Page",
"relation": {
"constraint_catalog": "postgres",
"constraint_schema": "public",
"constraint_name": "pages_category_id_fkey",
"table_catalog": "postgres",
"table_schema": "public",
"table_name": "pages",
"constraint_type": "FOREIGN KEY",
"is_deferrable": "NO",
"initially_deferred": "NO",
"enforced": "YES",
"column_name": "category_id",
"ordinal_position": 1,
"position_in_unique_constraint": 1,
"foreign_table_name": "categories",
"foreign_column_name": "id"
}
}
]
},
"page": {
"name": "Page",
"singularName": "page",
"pluralName": "pages",
"primaryKey": "id",
"table": "pages",
"fields": {
"id": {
"sqlType": "int4",
"isNullable": false,
"primaryKey": true,
"camelcase": "id"
},
"title": {
"sqlType": "varchar",
"isNullable": false,
"camelcase": "title"
},
"category_id": {
"sqlType": "int4",
"isNullable": true,
"foreignKey": true,
"camelcase": "categoryId"
},
"user_id": {
"sqlType": "int4",
"isNullable": true,
"camelcase": "userId"
}
},
"camelCasedFields": {
"id": {
"sqlType": "int4",
"isNullable": false,
"primaryKey": true,
"camelcase": "id"
},
"title": {
"sqlType": "varchar",
"isNullable": false,
"camelcase": "title"
},
"categoryId": {
"sqlType": "int4",
"isNullable": true,
"foreignKey": true,
"camelcase": "categoryId"
},
"userId": {
"sqlType": "int4",
"isNullable": true,
"camelcase": "userId"
}
},
"relations": [
{
"constraint_catalog": "postgres",
"constraint_schema": "public",
"constraint_name": "pages_category_id_fkey",
"table_catalog": "postgres",
"table_schema": "public",
"table_name": "pages",
"constraint_type": "FOREIGN KEY",
"is_deferrable": "NO",
"initially_deferred": "NO",
"enforced": "YES",
"column_name": "category_id",
"ordinal_position": 1,
"position_in_unique_constraint": 1,
"foreign_table_name": "categories",
"foreign_column_name": "id"
}
],
"reverseRelationships": []
}
}
+ + + + \ No newline at end of file diff --git a/docs/reference/sql-mapper/entities/fields/index.html b/docs/reference/sql-mapper/entities/fields/index.html new file mode 100644 index 00000000000..c7b64c5ce5c --- /dev/null +++ b/docs/reference/sql-mapper/entities/fields/index.html @@ -0,0 +1,17 @@ + + + + + +Fields | Platformatic Open Source Software + + + + + +
+
Version: 0.42.1

Fields

When Platformatic DB inspects a database's schema, it creates an object for each table that contains a mapping of their fields.

These objects contain the following properties:

  • singularName: singular entity name, based on table name. Uses inflected under the hood.
  • pluralName: plural entity name (e.g. 'pages')
  • primaryKey: the field which is identified as primary key.
  • table: original table name
  • fields: an object containing all fields details. Object key is the field name.
  • camelCasedFields: an object containing all fields details in camelcase. If you have a column named user_id you can access it using either userId or user_id

Fields detail

  • sqlType: The original field type. It may vary depending on the underlying DB Engine
  • isNullable: Whether the field can be null or not
  • primaryKey: Whether the field is the primary key or not
  • camelcase: The camelcased value of the field

Example

Given this SQL Schema (for PostgreSQL):

CREATE SEQUENCE IF NOT EXISTS pages_id_seq;
CREATE TABLE "public"."pages" (
"id" int4 NOT NULL DEFAULT nextval('pages_id_seq'::regclass),
"title" varchar,
"body_content" text,
"category_id" int4,
PRIMARY KEY ("id")
);

The resulting mapping object will be:

{
singularName: 'page',
pluralName: 'pages',
primaryKey: 'id',
table: 'pages',
fields: {
id: {
sqlType: 'int4',
isNullable: false,
primaryKey: true,
camelcase: 'id'
},
title: {
sqlType: 'varchar',
isNullable: true,
camelcase: 'title'
},
body_content: {
sqlType: 'text',
isNullable: true,
camelcase: 'bodyContent'
},
category_id: {
sqlType: 'int4',
isNullable: true,
foreignKey: true,
camelcase: 'categoryId'
}
}
camelCasedFields: {
id: {
sqlType: 'int4',
isNullable: false,
primaryKey: true,
camelcase: 'id'
},
title: {
sqlType: 'varchar',
isNullable: true,
camelcase: 'title'
},
bodyContent: {
sqlType: 'text',
isNullable: true,
camelcase: 'bodyContent'
},
categoryId: {
sqlType: 'int4',
isNullable: true,
foreignKey: true,
camelcase: 'categoryId'
}
},
relations: []
}
+ + + + \ No newline at end of file diff --git a/docs/reference/sql-mapper/entities/hooks/index.html b/docs/reference/sql-mapper/entities/hooks/index.html new file mode 100644 index 00000000000..be3d20b5220 --- /dev/null +++ b/docs/reference/sql-mapper/entities/hooks/index.html @@ -0,0 +1,17 @@ + + + + + +Hooks | Platformatic Open Source Software + + + + + +
+
Version: 0.42.1

Hooks

Entity hooks are a way to wrap the API methods for an entity and add custom behaviour.

The Platformatic DB SQL Mapper provides an addEntityHooks(entityName, spec) function that can be used to add hooks for an entity.

How to use hooks

addEntityHooks accepts two arguments:

  1. A string representing the entity name (singularized), for example 'page'.
  2. A key/value object where the key is one of the API methods (find, insert, save, delete) and the value is a callback function. The callback will be called with the original API method and the options that were passed to that method. See the example below.

Usage

'use strict'
const { connect } = require('@platformatic/sql-mapper')
const { pino } = require('pino')
const pretty = require('pino-pretty')
const logger = pino(pretty())

async function main() {
const pgConnectionString = 'postgres://postgres:postgres@127.0.0.1/postgres'
const mapper = await connect({
connectionString: pgConnectionString,
log: logger,
})
mapper.addEntityHooks('page', {
find: async (originalFind, opts) => {
// Add a `foo` field with `bar` value to each row
const res = await originalFind(opts)
return res.map((row) => {
row.foo = 'bar'
return row
})
}
})
const res = await mapper.entities.page.find({
fields: ['id', 'title',],
where: {
id: {
lt: 10
}
},
})
logger.info(res)
/**
[
0: {
"id": "5",
"title": "Page 1",
"foo": "bar"
},
1: {
"id": "6",
"title": "Page 2",
"foo": "bar"
}
]
*/
await mapper.db.dispose()
}
main()

Multiple Hooks

Multiple hooks can be added for the same entity and API method, for example:

'use strict'
const { connect } = require('@platformatic/sql-mapper')
const { pino } = require('pino')
const pretty = require('pino-pretty')
const logger = pino(pretty())

async function main() {
const pgConnectionString = 'postgres://postgres:postgres@127.0.0.1/postgres'
const mapper = await connect({
connectionString: pgConnectionString,
log: logger,
})
mapper.addEntityHooks('page', {
find: async function firstHook(previousFunction, opts) {
// Add a `foo` field with `bar` value to each row
const res = await previousFunction(opts)
return res.map((row) => {
row.foo = 'bar'
return row
})
}
})
mapper.addEntityHooks('page', {
find: async function secondHook(previousFunction, opts) {
// Add a `bar` field with `baz` value to each row
const res = await previousFunction(opts)
return res.map((row) => {
row.bar = 'baz'
return row
})
}
})
const res = await mapper.entities.page.find({
fields: ['id', 'title',],
where: {
id: {
lt: 10
}
},
})
logger.info(res)
/**
[
0: {
"id": "5",
"title": "Page 1",
"foo": "bar",
"bar": "baz"
},
1: {
"id": "6",
"title": "Page 2",
"foo": "bar",
"bar": "baz"
}
]
*/
await mapper.db.dispose()
}
main()

Since hooks are wrappers, they are being called in reverse order, like the image below

Hooks Lifecycle

So even though we defined two hooks, the Database will be hit only once.

Query result will be processed by firstHook, which will pass the result to secondHook, which will, finally, send the processed result to the original .find({...}) function.

+ + + + \ No newline at end of file diff --git a/docs/reference/sql-mapper/entities/introduction/index.html b/docs/reference/sql-mapper/entities/introduction/index.html new file mode 100644 index 00000000000..c7bf95edc8b --- /dev/null +++ b/docs/reference/sql-mapper/entities/introduction/index.html @@ -0,0 +1,17 @@ + + + + + +Introduction to Entities | Platformatic Open Source Software + + + + + +
+
Version: 0.42.1

Introduction to Entities

The primary goal of Platformatic DB is to read a database schema and generate REST and GraphQL endpoints that enable the execution of CRUD (Create/Retrieve/Update/Delete) operations against the database.

Platformatic DB includes a mapper that reads the schemas of database tables and then generates an entity object for each table.

Platformatic DB is a Fastify application. The Fastify instance object is decorated with the platformatic property, which exposes several APIs that handle the manipulation of data in the database.

Platformatic DB populates the app.platformatic.entities object with data found in database tables.

The keys on the entities object are singularized versions of the table names — for example users becomes user, categories becomes category — and the values are a set of associated metadata and functions.

+ + + + \ No newline at end of file diff --git a/docs/reference/sql-mapper/entities/relations/index.html b/docs/reference/sql-mapper/entities/relations/index.html new file mode 100644 index 00000000000..9f3301955de --- /dev/null +++ b/docs/reference/sql-mapper/entities/relations/index.html @@ -0,0 +1,20 @@ + + + + + +Relations | Platformatic Open Source Software + + + + + +
+
Version: 0.42.1

Relations

When Platformatic DB is reading your database schema, it identifies relationships +between tables and stores metadata on them in the entity object's relations field. +This is achieved by querying the database's internal metadata.

Example

Given this PostgreSQL schema:

CREATE SEQUENCE IF NOT EXISTS categories_id_seq;

CREATE TABLE "categories" (
"id" int4 NOT NULL DEFAULT nextval('categories_id_seq'::regclass),
"name" varchar(255) NOT NULL,
PRIMARY KEY ("id")
);

CREATE SEQUENCE IF NOT EXISTS pages_id_seq;

CREATE TABLE "pages" (
"id" int4 NOT NULL DEFAULT nextval('pages_id_seq'::regclass),
"title" varchar(255) NOT NULL,
"body_content" text,
"category_id" int4,
PRIMARY KEY ("id")
);

ALTER TABLE "pages" ADD FOREIGN KEY ("category_id") REFERENCES "categories"("id");

When this code is run:

'use strict'
const { connect } = require('@platformatic/sql-mapper')
const { pino } = require('pino')
const pretty = require('pino-pretty')
const logger = pino(pretty())

async function main() {
const pgConnectionString = 'postgres://postgres:postgres@127.0.0.1/postgres'
const mapper = await connect({
connectionString: pgConnectionString,
log: logger,
})
const pageEntity = mapper.entities.page
console.log(pageEntity.relations)
await mapper.db.dispose()
}
main()

The output will be:

[
{
constraint_catalog: 'postgres',
constraint_schema: 'public',
constraint_name: 'pages_category_id_fkey',
table_catalog: 'postgres',
table_schema: 'public',
table_name: 'pages',
constraint_type: 'FOREIGN KEY',
is_deferrable: 'NO',
initially_deferred: 'NO',
enforced: 'YES',
column_name: 'category_id',
ordinal_position: 1,
position_in_unique_constraint: 1,
foreign_table_name: 'categories',
foreign_column_name: 'id'
}
]

As Platformatic DB supports multiple database engines, the contents of the +relations object will vary depending on the database being used.

The following relations fields are common to all database engines:

  • column_name — the column that stores the foreign key
  • foreign_table_name — the table hosting the related row
  • foreign_column_name — the column in foreign table that identifies the row
+ + + + \ No newline at end of file diff --git a/docs/reference/sql-mapper/entities/timestamps/index.html b/docs/reference/sql-mapper/entities/timestamps/index.html new file mode 100644 index 00000000000..ef85c05beea --- /dev/null +++ b/docs/reference/sql-mapper/entities/timestamps/index.html @@ -0,0 +1,17 @@ + + + + + +Timestamps | Platformatic Open Source Software + + + + + +
+
Version: 0.42.1

Timestamps

Timestamps can be used to automatically set the created_at and updated_at fields on your entities.

Timestamps are enabled by default

Configuration

To disable timestamps, you need to set the autoTimestamp field to false in configuration file:

{
...
"db": {
"connectionString": "postgres://postgres:postgres@127.0.0.1/postgres",
"autoTimestamp": false
},
...
}

Customizing the field names

By default, the created_at and updated_at fields are used. You can customize the field names by setting the createdAt and updatedAt options in autoTimestamp field in configuration file:

{
...
"db": {
"connectionString": "postgres://postgres:postgres@127.0.0.1/postgres",
"autoTimestamp": {
"createdAt": "inserted_at",
"updatedAt": "updated_at"
}
...
}
+ + + + \ No newline at end of file diff --git a/docs/reference/sql-mapper/entities/transactions/index.html b/docs/reference/sql-mapper/entities/transactions/index.html new file mode 100644 index 00000000000..61d5533649b --- /dev/null +++ b/docs/reference/sql-mapper/entities/transactions/index.html @@ -0,0 +1,18 @@ + + + + + +Transactions | Platformatic Open Source Software + + + + + +
+
Version: 0.42.1

Transactions

Platformatic DB entities support transactions through the tx optional parameter. +If the tx parameter is provided, the entity will join the transaction, e.g.:


const { connect } = require('@platformatic/sql-mapper')
const logger = pino(pretty())

async function main() {
const pgConnectionString = 'postgres://postgres:postgres@127.0.0.1/postgres'
const { db, entities} = await connect({
connectionString: pgConnectionString,
log: logger,
})

const result = await db.tx(async tx => {
// these two operations will be executed in the same transaction
const authorResult = await entities.author.save({
fields: ['id', 'name'],
input: { name: 'test'},
tx
})
const res = await entities.page.save({
fields: ['title', 'authorId'],
input: { title: 'page title', authorId: authorResult.id },
tx
})
return res
})

}

Throwing an Error triggers a transaction rollback:

    try {
await db.tx(async tx => {
await entities.page.save({
input: { title: 'new page' },
fields: ['title'],
tx
})

// here we have `new page`
const findResult = await entities.page.find({ fields: ['title'], tx })

// (...)

// We force the rollback
throw new Error('rollback')
})
} catch (e) {
// rollback
}

// no 'new page' here...
const afterRollback = await entities.page.find({ fields: ['title'] })

+ + + + \ No newline at end of file diff --git a/docs/reference/sql-mapper/fastify-plugin/index.html b/docs/reference/sql-mapper/fastify-plugin/index.html new file mode 100644 index 00000000000..48d670bb1cf --- /dev/null +++ b/docs/reference/sql-mapper/fastify-plugin/index.html @@ -0,0 +1,17 @@ + + + + + +sql-mapper Fastify Plugin | Platformatic Open Source Software + + + + + +
+
Version: 0.42.1

sql-mapper Fastify Plugin

The @platformatic/sql-mapper package exports a Fastify plugin that can be used out-of the box in a server application.

A connectionString option must be passed to connect to your database.

The plugin decorates the server with a platformatic object that has the following properties:

  • db — the DB wrapper object provided by @databases
  • sql — the SQL query mapper object provided by @databases
  • entities — all entity objects with their API methods
  • addEntityHooks — a function to add a hook to an entity API method.

The plugin also decorates the Fastify Request object with the following:

  • platformaticContext: an object with the following two properties:
    • app, the Fastify application of the given route
    • reply, the Fastify Reply instance matching that request

Usage

'use strict'

const Fastify = require('fastify')
const mapper = require('@platformatic/sql-mapper')

async function main() {
const app = Fastify({
logger: {
level: 'info'
}
})
app.register(mapper.plugin, {
connectionString: 'postgres://postgres:postgres@127.0.0.1/postgres'
})

app.get('/all-pages', async (req, reply) => {
// Optionally get the platformatic context.
// Passing this to all sql-mapper functions allow to apply
// authorization rules to the database queries (amongst other things).
const ctx = req.platformaticContext

// Will return all rows from 'pages' table
const res = await app.platformatic.entities.page.find({ ctx })
return res
})

await app.listen({ port: 3333 })
}

main()
+ + + + \ No newline at end of file diff --git a/docs/reference/sql-mapper/introduction/index.html b/docs/reference/sql-mapper/introduction/index.html new file mode 100644 index 00000000000..69e11924fff --- /dev/null +++ b/docs/reference/sql-mapper/introduction/index.html @@ -0,0 +1,19 @@ + + + + + +Introduction to @platformatic/sql-mapper | Platformatic Open Source Software + + + + + +
+
Version: 0.42.1

Introduction to @platformatic/sql-mapper

@platformatic/sql-mapper is the underlying utility that Platformatic DB uses to create useful utilities to +manipulate your SQL database using JavaScript.

This module is bundled with Platformatic DB via a fastify plugin +The rest of this guide shows how to use this module directly.

Install

npm i @platformatic/sql-mapper

API

connect(opts) : Promise

It will inspect a database schema and return an object containing:

  • db — A database abstraction layer from @databases
  • sql — The SQL builder from @databases
  • entities — An object containing a key for each table found in the schema, with basic CRUD operations. See Entity Reference for details.

The valid options are:

  • connectionString — The Database connection string
  • poolSize - Maximum number of connections in the connection pool. Defaults to 10.
  • log — A logger object (like Pino)
  • onDatabaseLoad — An async function that is called after the connection is established. It will receive db and sql as parameters.
  • ignore — Object used to ignore some tables from building entities. (i.e. { 'versions': true } will ignore versions table)
  • autoTimestamp — Generate timestamp automatically when inserting/updating records.
  • hooks — For each entity name (like Page) you can customize any of the entity API function. Your custom function will receive the original function as first parameter, and then all the other parameters passed to it.

createConnectionPool(opts) : Promise

It will inspect a database schema and return an object containing:

The valid options are:

  • connectionString — The Database connection string
  • poolSize - Maximum number of connections in the connection pool. Defaults to 10.
  • log — A logger object (like Pino)

This utility is useful if you just need to connect to the db without generating any entity.

Code samples

const { connect } = require('@platformatic/sql-mapper')
const { pino } = require('pino')

const logger = pino()

async function onDatabaseLoad (db, sql) {
await db.query(sql`CREATE TABLE pages (
id SERIAL PRIMARY KEY,
title VARCHAR(255) NOT NULL
);`)
}
const connectionString =
'postgres://postgres:postgres@localhost:5432/postgres'
const mapper = await connect({
connectionString,
log: logger,
onDatabaseLoad,
ignore: {},
hooks: {
Page: {
find: async function(_find, opts) {
console.log('hook called');
return await _find(opts)
}
}
}
})
const pageEntity = mapper.entities.page

await mapper.db.query(mapper.sql`SELECT * FROM pages`)
await mapper.db.find('option1', 'option2')
+ + + + \ No newline at end of file diff --git a/docs/reference/sql-openapi/api/index.html b/docs/reference/sql-openapi/api/index.html new file mode 100644 index 00000000000..49242da11f1 --- /dev/null +++ b/docs/reference/sql-openapi/api/index.html @@ -0,0 +1,22 @@ + + + + + +API | Platformatic Open Source Software + + + + + +
+
Version: 0.42.1

API

Each table is mapped to an entity named after the table's name.

In the following reference we'll use some placeholders, but let's see an example

Example

Given this SQL executed against your database:

CREATE TABLE pages (
id SERIAL PRIMARY KEY,
title VARCHAR(255) NOT NULL,
body TEXT NOT NULL
);
  • [PLURAL_ENTITY_NAME] is pages
  • [SINGULAR_ENTITY_NAME] is page
  • [PRIMARY_KEY] is id
  • fields are id, title, body

GET and POST parameters

Some APIs needs the GET method, where parameters must be defined in the URL, or POST/PUT methods, where parameters can be defined in the http request payload.

Fields

Every API can define a fields parameter, representing the entity fields you want to get back for each row of the table. If not specified all fields are returned.

The fields parameter is always sent in the query string, even for POST, PUT and DELETE requests, as a comma separated value.

## `GET /[PLURAL_ENTITY_NAME]`

Return all entities matching where clause

Where clause

You can define many WHERE clauses in REST API, each clause includes a field, an operator and a value.

The field is one of the fields found in the schema.

The operator follows this table:

Platformatic operatorSQL operator
eq'='
in'IN'
nin'NOT IN'
neq'<>'
gt'>'
gte'>='
lt'<'
lte'<='

The value is the value you want to compare the field to.

For GET requests all these clauses are specified in the query string using the format where.[FIELD].[OPERATOR]=[VALUE]

Example

If you want to get the title and the body of every page where id < 15 you can make an HTTP request like this:

$ curl -X 'GET' \
'http://localhost:3042/pages/?fields=body,title&where.id.lt=15' \
-H 'accept: application/json'

Where clause operations are by default combined with the AND operator. To create an OR condition use the where.or query param.

Each where.or query param can contain multiple conditions separated by a | (pipe).

The where.or conditions are similar to the where conditions, except that they don't have the where prefix.

Example

If you want to get the posts where counter = 10 OR counter > 30 you can make an HTTP request like this:

$ curl -X 'GET' \
'http://localhost:3042/pages/?where.or=(counter.eq=10|counter.gte=30)' \
-H 'accept: application/json'

OrderBy clause

You can define the ordering of the returned rows within your REST API calls with the orderby clause using the following pattern:

?orderby.[field]=[asc | desc]

The field is one of the fields found in the schema. +The value can be asc or desc.

Example

If you want to get the pages ordered alphabetically by their titles you can make an HTTP request like this:

$ curl -X 'GET' \
'http://localhost:3042/pages?orderby.title=asc' \
-H 'accept: application/json'

Total Count

If totalCount boolean is true in query, the GET returns the total number of elements in the X-Total-Count header ignoring limit and offset (if specified).

$ curl -v -X 'GET' \
'http://localhost:3042/pages/?limit=2&offset=0&totalCount=true' \
-H 'accept: application/json'

(...)
> HTTP/1.1 200 OK
> x-total-count: 18
(...)

[{"id":1,"title":"Movie1"},{"id":2,"title":"Movie2"}]%

POST [PLURAL_ENTITY_NAME]

Creates a new row in table. Expects fields to be sent in a JSON formatted request body.

Example

$ curl -X 'POST' \
'http://localhost:3042/pages/' \
-H 'accept: application/json' \
-H 'Content-Type: application/json' \
-d '{
"title": "Hello World",
"body": "Welcome to Platformatic!"
}'

{
"id": 1,
"title": "Hello World",
"body": "Welcome to Platformatic"
}

GET [PLURAL_ENTITY_NAME]/[PRIMARY_KEY]

Returns a single row, identified by PRIMARY_KEY.

Example

$ curl -X 'GET' 'http://localhost:3042/pages/1?fields=title,body

{
"title": "Hello World",
"body": "Welcome to Platformatic"
}

POST [PLURAL_ENTITY_NAME]/[PRIMARY_KEY]

Updates a row identified by PRIMARY_KEY.

Example

$ curl -X 'POST' \
'http://localhost:3042/pages/1' \
-H 'accept: application/json' \
-H 'Content-Type: application/json' \
-d '{
"title": "Hello Platformatic!",
"body": "Welcome to Platformatic!"
}'

{
"id": 1,
"title": "Hello Platformatic!",
"body": "Welcome to Platformatic"
}

PUT [PLURAL_ENTITY_NAME]/[PRIMARY_KEY]

Same as POST [PLURAL_ENTITY_NAME]/[PRIMARY_KEY].

## `PUT [PLURAL_ENTITY_NAME]`

Updates all entities matching where clause

Example

$ curl -X 'PUT' \
'http://localhost:3042/pages?where.id.in=1,2' \
-H 'accept: application/json' \
-H 'Content-Type: application/json' \
-d '{
"title": "Updated title!",
"body": "Updated body!"
}'

[{
"id": 1,
"title": "Updated title!",
"body": "Updated body!"
},{
"id": 2,
"title": "Updated title!",
"body": "Updated body!"
}]

DELETE [PLURAL_ENTITY_NAME]/[PRIMARY_KEY]

Deletes a row identified by the PRIMARY_KEY.

Example

$ curl -X 'DELETE' 'http://localhost:3042/pages/1?fields=title'

{
"title": "Hello Platformatic!"
}

Nested Relationships

Let's consider the following SQL:

CREATE TABLE IF NOT EXISTS movies (
movie_id INTEGER PRIMARY KEY,
title TEXT NOT NULL
);
CREATE TABLE IF NOT EXISTS quotes (
id INTEGER PRIMARY KEY,
quote TEXT NOT NULL,
movie_id INTEGER NOT NULL REFERENCES movies(movie_id)
);

And:

  • [P_PARENT_ENTITY] is movies
  • [S_PARENT_ENTITY] is movie
  • [P_CHILDREN_ENTITY] is quotes
  • [S_CHILDREN_ENTITY] is quote

In this case, more APIs are available:

GET [P_PARENT_ENTITY]/[PARENT_PRIMARY_KEY]/[P_CHILDREN_ENTITY]

Given a 1-to-many relationship, where a parent entity can have many children, you can query for the children directly.

$ curl -X 'GET' 'http://localhost:3042/movies/1/quotes?fields=quote

[
{
"quote": "I'll be back"
},
{
"quote": "Hasta la vista, baby"
}
]

GET [P_CHILDREN_ENTITY]/[CHILDREN_PRIMARY_KEY]/[S_PARENT_ENTITY]

You can query for the parent directly, e.g.:

$ curl -X 'GET' 'http://localhost:3042/quotes/1/movie?fields=title

{
"title": "Terminator"
}

Many-to-Many Relationships

Many-to-Many relationship lets you relate each row in one table to many rows in +another table and vice versa.

Many-to-many relationship are implemented in SQL via a "join table", a table whose primary key +is composed by the identifier of the two parts of the many-to-many relationship.

Platformatic DB fully supports many-to-many relationships on all supported databases.

Let's consider the following SQL:

CREATE TABLE pages (
id INTEGER PRIMARY KEY,
the_title VARCHAR(42)
);

CREATE TABLE users (
id INTEGER PRIMARY KEY,
username VARCHAR(255) NOT NULL
);

CREATE TABLE editors (
page_id INTEGER NOT NULL,
user_id INTEGER NOT NULL,
role VARCHAR(255) NOT NULL,
CONSTRAINT fk_editor_pages FOREIGN KEY (page_id) REFERENCES pages(id),
CONSTRAINT fk_editor_users FOREIGN KEY (user_id) REFERENCES users(id),
PRIMARY KEY (page_id, user_id)
);

And:

  • [P_ENTITY] is editors
  • [P_REL_1] is pages
  • [S_REL_1] is page
  • [KEY_REL_1] is pages PRIMARY KEY: pages(id)
  • [P_REL_2] is users
  • [S_REL_2] is user
  • [KEY_REL_2] is users PRIMARY KEY: users(id)

In this case, here are the APIs that are available for the join table:

GET [P_ENTITY]/[S_REL_1]/[KEY_REL_1]/[S_REL_2]/[KEY_REL_2]

This returns the entity in the "join table", e.g. GET /editors/page/1/user/1.

POST [P_ENTITY]/[S_REL_1]/[KEY_REL_1]/[S_REL_2]/[KEY_REL_2]

Creates a new entity in the "join table", e.g. POST /editors/page/1/user/1.

PUT [P_ENTITY]/[S_REL_1]/[KEY_REL_1]/[S_REL_2]/[KEY_REL_2]

Updates an entity in the "join table", e.g. PUT /editors/page/1/user/1.

DELETE [P_ENTITY]/[S_REL_1]/[KEY_REL_1]/[S_REL_2]/[KEY_REL_2]

Delete the entity in the "join table", e.g. DELETE /editors/page/1/user/1.

GET /[P_ENTITY]

See the above.

Offset only accepts values >= 0. Otherwise an error is returned.

Pagination

Platformatic DB supports pagination of results through two input parameters: limit and offset.

Example

$ curl -X 'GET' 'http://localhost:3042/movies?limit=5&offset=10'

[
{
"title": "Star Wars",
"movie_id": 10
},
...
{
"title": "007",
"movie_id": 14
}
]

It returns 5 movies starting from position 10.

TotalCount functionality can be used in order to evaluate if there are more pages.

Limit

By default a limit value (10) is applied to each request.

Clients can override this behavior by passing a value. In this case the server validates the input, and an error is returned if it exceeds the maximum accepted value (100).

Limit's values can be customized through configuration:

{
...
"db": {
...
"limit": {
"default": 50,
"max": 1000
}
}
}

Limit only accepts values >= 0. Otherwise an error is returned.

Offset

By default, offset is not applied to the request. Clients can override this behavior by passing a value.

Offset only accepts values >= 0. Otherwise an error is returned.

+ + + + \ No newline at end of file diff --git a/docs/reference/sql-openapi/ignore/index.html b/docs/reference/sql-openapi/ignore/index.html new file mode 100644 index 00000000000..6b62bd7f0b6 --- /dev/null +++ b/docs/reference/sql-openapi/ignore/index.html @@ -0,0 +1,17 @@ + + + + + +Ignoring entities and fields | Platformatic Open Source Software + + + + + + + + + + \ No newline at end of file diff --git a/docs/reference/sql-openapi/introduction/index.html b/docs/reference/sql-openapi/introduction/index.html new file mode 100644 index 00000000000..55f0d5b3e40 --- /dev/null +++ b/docs/reference/sql-openapi/introduction/index.html @@ -0,0 +1,17 @@ + + + + + +Introduction to the REST API | Platformatic Open Source Software + + + + + +
+
Version: 0.42.1

Introduction to the REST API

The Platformatic DB OpenAPI plugin automatically starts a REST API server (powered by Fastify) that provides CRUD (Create, Read, Update, Delete) functionality for each entity.

Configuration

In the config file, under the "db" section, the OpenAPI server is enabled by default. Although you can disable it setting the property openapi to false.

Example

{
...
"db": {
"openapi": false
}
}

As Platformatic DB uses fastify-swagger under the hood, the "openapi" property can be an object that follows the OpenAPI Specification Object format.

This allows you to extend the output of the Swagger UI documentation.

+ + + + \ No newline at end of file diff --git a/img/docusaurus.png b/img/docusaurus.png new file mode 100644 index 00000000000..f458149e3c8 Binary files /dev/null and b/img/docusaurus.png differ diff --git a/img/fastify-square.svg b/img/fastify-square.svg new file mode 100644 index 00000000000..823d561cc07 --- /dev/null +++ b/img/fastify-square.svg @@ -0,0 +1,8 @@ + + + + diff --git a/img/fastify-white.svg b/img/fastify-white.svg new file mode 100644 index 00000000000..5239cbbacc9 --- /dev/null +++ b/img/fastify-white.svg @@ -0,0 +1,43 @@ + + + + + + + + + + + + + + + diff --git a/img/fastify.svg b/img/fastify.svg new file mode 100644 index 00000000000..0f899c9df15 --- /dev/null +++ b/img/fastify.svg @@ -0,0 +1,44 @@ + + + + + + + + + + + + + + + diff --git a/img/favicon.ico b/img/favicon.ico new file mode 100644 index 00000000000..6b064af16f1 Binary files /dev/null and b/img/favicon.ico differ diff --git a/img/graphql-icon.svg b/img/graphql-icon.svg new file mode 100644 index 00000000000..44c08c21294 --- /dev/null +++ b/img/graphql-icon.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/img/logo.svg b/img/logo.svg new file mode 100644 index 00000000000..9db6d0d066e --- /dev/null +++ b/img/logo.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/img/openapi.svg b/img/openapi.svg new file mode 100644 index 00000000000..7c480f8442c --- /dev/null +++ b/img/openapi.svg @@ -0,0 +1 @@ + diff --git a/img/platformatic-composer-architecture.png b/img/platformatic-composer-architecture.png new file mode 100644 index 00000000000..9625214ba76 Binary files /dev/null and b/img/platformatic-composer-architecture.png differ diff --git a/img/platformatic-db-architecture.png b/img/platformatic-db-architecture.png new file mode 100644 index 00000000000..e19a8cdd18f Binary files /dev/null and b/img/platformatic-db-architecture.png differ diff --git a/img/platformatic-runtime-architecture.png b/img/platformatic-runtime-architecture.png new file mode 100644 index 
00000000000..59be54342f0 Binary files /dev/null and b/img/platformatic-runtime-architecture.png differ diff --git a/img/platformatic-service-architecture.png b/img/platformatic-service-architecture.png new file mode 100644 index 00000000000..b5d5ab7a5ef Binary files /dev/null and b/img/platformatic-service-architecture.png differ diff --git a/img/plt-logo-inverted.svg b/img/plt-logo-inverted.svg new file mode 100644 index 00000000000..8ee1ae4115c --- /dev/null +++ b/img/plt-logo-inverted.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/img/plt-logo.svg b/img/plt-logo.svg new file mode 100644 index 00000000000..cdcf00d5e23 --- /dev/null +++ b/img/plt-logo.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/img/undraw_docusaurus_mountain.svg b/img/undraw_docusaurus_mountain.svg new file mode 100644 index 00000000000..af961c49a88 --- /dev/null +++ b/img/undraw_docusaurus_mountain.svg @@ -0,0 +1,171 @@ + + Easy to Use + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/img/undraw_docusaurus_react.svg b/img/undraw_docusaurus_react.svg new file mode 100644 index 00000000000..94b5cf08f88 --- /dev/null +++ b/img/undraw_docusaurus_react.svg @@ -0,0 +1,170 @@ + + Powered by React + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/img/undraw_docusaurus_tree.svg b/img/undraw_docusaurus_tree.svg new file mode 100644 index 00000000000..d9161d33920 --- /dev/null +++ 
b/img/undraw_docusaurus_tree.svg @@ -0,0 +1,40 @@ + + Focus on What Matters + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/index.html b/index.html new file mode 100644 index 00000000000..495dbd7e422 --- /dev/null +++ b/index.html @@ -0,0 +1,17 @@ + + + + + +Platformatic Open Source Software | Platformatic Open Source Software + + + + + +
+

Platformatic Open Source Software

Why Platformatic?

Platformatic enables developers to efficiently develop and run APIs at scale. Historically, API developers have had to repetitively build infrastructure to satisfy foundational requirements, like authentication, authorization, caching, and connection to databases, and have had to manage microservices with technologies such as service mesh or centralized registries. This work is time consuming, undifferentiated, and painstakingly complex. With growing demands of SaaS applications, the amount of API permutations has grown exponentially and has become a development bottleneck. This has led large organizations to create dedicated platform API engineering teams to help teams deliver on business demands.

Our goal is to make API development simple: we aim to remove all friction from the day-to-day of backend developers. Platformatic is a series of Open Source tools to build APIs. Check out our announcement video.

Platformatic Service

Setting up new projects and APIs is boring. We want to make it easy for you to get started and to have a production ready setup in no time. Platformatic Service is your starting point for creating a Node.js API on top of the Fastify framework, providing a set of batteries included defaults for all your needs.

Check out the basic Platformatic Service features:

  • Customizable via Node.js and Fastify plugins, with automatic types
  • Automatic TypeScript compilation
  • Prometheus metrics
  • Blazing fast live reloads during development
  • OpenAPI schema generation
  • GraphQL integration
  • Third-party API client generation
  • Batteries included project generator
  • File-system based routing
Platformatic Service Architecture
Platformatic DB Architecture

Platformatic DB

Are you tired of creating Create-Read-Update-Delete (CRUD) APIs? Platformatic DB is a tool that allows you to create both OpenAPI and GraphQL schemas from your database, without having to write a single line of code. The key difference to similar tools is that Platformatic DB allows you to customize it via Node.js and Fastify plugins, because it's based on Platformatic Service.

Check out the basic Platformatic DB features:

  • Automatic OpenAPI/REST API generation from the SQL schema
  • Automatic GraphQL API generation from the SQL schema
  • Multiple databases: SQLite, MySQL, MariaDB, PostgreSQL
  • Multiple authentication methods: JWT, WebHook, HTTP Headers
  • Authorization via role based access control (RBAC)
  • Type-safety via generated types for improved database interactions
  • ...and all the features of Platformatic Service

Platformatic Composer

Want to automatically compose microservices into one ecosystem with a single public API? Platformatic Composer is a new way to develop aggregated APIs, starting with OpenAPI composition across multiple API sources.

Check out the Platformatic Composer features:

  • OpenAPI composition
  • Resolve conflicts between different endpoints
  • Automatic schema refresh
  • Customizable via Node.js and Fastify plugins
  • Automatic TypeScript compilation
  • ...and all the features of Platformatic Service
Platformatic Composer Architecture
Platformatic Runtime Architecture

Platformatic Runtime

The Platformatic Runtime environment enables developers to leverage the perks of microservices with the deployment simplicity of a monolith. It consolidates all your Node.js applications into a single Node.js process, simplifying the development & execution of microservices.

Check out the Platformatic Runtime features:

Platformatic DB creates a GraphQL API from your database schema. It also supports Apollo Federation. You can use the API to query and mutate data.

Platformatic DB creates a REST API from your database schema. It generates an OpenAPI 3.0 specification, too.

The roots of Platformatic DB are in the Fastify community. You can customize it with Fastify plugins.

Quotes

James Snell
Node.js Technical Steering Committee

Platformatic DB is a great example of how to build a great developer experience on top of a database.

Jan Lehnardt
Makes @couchdb & #offlinefirst. Made @jsconfeu @greenkeeperio @hoodiehq. CEO at @neighbourh00die.

I always wondered why building APIs isn't that easy.

David Mark Clements
Dave at Holepunch. Tech lead/primary author of OpenJS Foundation JSNAD & JSNSD Certifications. (Original) Author of Node Cookbook.

A platform that simplifies and streamlines developer collaboration within an organization can revolutionise the organization itself. This is digital transformation distilled to its purest and easiest form.

Manuel Spigolon
Senior Software Engineer at NearForm. Fastify Collaborator. Author of "Accelerating Server-Side Development with Fastify".

After building this small project, I think Platformatic DB is not just an ORM as it may seem but an enhanced version of Fastify. It implements a lot of good practices and boring stuff that enable us to spin up a Fastify instance!


+ + + + \ No newline at end of file diff --git a/markdown-page/index.html b/markdown-page/index.html new file mode 100644 index 00000000000..5257ac61908 --- /dev/null +++ b/markdown-page/index.html @@ -0,0 +1,17 @@ + + + + + +Markdown page example | Platformatic Open Source Software + + + + + +
+

Markdown page example

You don't need React to write simple standalone pages.

+ + + + \ No newline at end of file diff --git a/orama-search-index-0.41.1.json.gz b/orama-search-index-0.41.1.json.gz new file mode 100644 index 00000000000..bcc002f5903 Binary files /dev/null and b/orama-search-index-0.41.1.json.gz differ diff --git a/orama-search-index-0.41.2.json.gz b/orama-search-index-0.41.2.json.gz new file mode 100644 index 00000000000..42cbcfb8d01 Binary files /dev/null and b/orama-search-index-0.41.2.json.gz differ diff --git a/orama-search-index-0.41.3.json.gz b/orama-search-index-0.41.3.json.gz new file mode 100644 index 00000000000..129ed6d50ad Binary files /dev/null and b/orama-search-index-0.41.3.json.gz differ diff --git a/orama-search-index-0.42.0.json.gz b/orama-search-index-0.42.0.json.gz new file mode 100644 index 00000000000..185adcf6a9b Binary files /dev/null and b/orama-search-index-0.42.0.json.gz differ diff --git a/orama-search-index-0.42.1.json.gz b/orama-search-index-0.42.1.json.gz new file mode 100644 index 00000000000..9f86a2228f3 Binary files /dev/null and b/orama-search-index-0.42.1.json.gz differ diff --git a/orama-search-index-current.json.gz b/orama-search-index-current.json.gz new file mode 100644 index 00000000000..76b7c1ea29d Binary files /dev/null and b/orama-search-index-current.json.gz differ diff --git a/sitemap.xml b/sitemap.xml new file mode 100644 index 00000000000..0719e7b0e49 --- /dev/null +++ b/sitemap.xml @@ -0,0 +1 @@ 
+https://docs.platformatic.dev/blogweekly0.5https://docs.platformatic.dev/blog/archiveweekly0.5https://docs.platformatic.dev/blog/coming-soonweekly0.5https://docs.platformatic.dev/markdown-pageweekly0.5https://docs.platformatic.dev/docs/0.41.1/category/getting-startedweekly0.5https://docs.platformatic.dev/docs/0.41.1/category/guidesweekly0.5https://docs.platformatic.dev/docs/0.41.1/category/packagesweekly0.5https://docs.platformatic.dev/docs/0.41.1/category/platformatic-cloudweekly0.5https://docs.platformatic.dev/docs/0.41.1/category/referenceweekly0.5https://docs.platformatic.dev/docs/0.41.1/contributing/weekly0.5https://docs.platformatic.dev/docs/0.41.1/contributing/documentation-style-guideweekly0.5https://docs.platformatic.dev/docs/0.41.1/getting-started/architectureweekly0.5https://docs.platformatic.dev/docs/0.41.1/getting-started/movie-quotes-app-tutorialweekly0.5https://docs.platformatic.dev/docs/0.41.1/getting-started/new-api-project-instructionsweekly0.5https://docs.platformatic.dev/docs/0.41.1/getting-started/quick-start-guideweekly0.5https://docs.platformatic.dev/docs/0.41.1/guides/add-custom-functionality/extend-graphqlweekly0.5https://docs.platformatic.dev/docs/0.41.1/guides/add-custom-functionality/extend-restweekly0.5https://docs.platformatic.dev/docs/0.41.1/guides/add-custom-functionality/introductionweekly0.5https://docs.platformatic.dev/docs/0.41.1/guides/add-custom-functionality/prerequisitesweekly0.5https://docs.platformatic.dev/docs/0.41.1/guides/add-custom-functionality/raw-sqlweekly0.5https://docs.platformatic.dev/docs/0.41.1/guides/compiling-typescript-for-deploymentweekly0.5https://docs.platformatic.dev/docs/0.41.1/guides/debug-platformatic-dbweekly0.5https://docs.platformatic.dev/docs/0.41.1/guides/deploying-on-lambdaweekly0.5https://docs.platformatic.dev/docs/0.41.1/guides/deployment/weekly0.5https://docs.platformatic.dev/docs/0.41.1/guides/deployment/advanced-fly-io-deploymentweekly0.5https://docs.platformatic.dev/docs/0.41.1/guides/deplo
yment/deploy-to-fly-io-with-sqliteweekly0.5https://docs.platformatic.dev/docs/0.41.1/guides/dockerize-platformatic-appweekly0.5https://docs.platformatic.dev/docs/0.41.1/guides/generate-frontend-code-to-consume-platformatic-rest-apiweekly0.5https://docs.platformatic.dev/docs/0.41.1/guides/jwt-auth0weekly0.5https://docs.platformatic.dev/docs/0.41.1/guides/migrating-express-app-to-platformatic-serviceweekly0.5https://docs.platformatic.dev/docs/0.41.1/guides/migrating-fastify-app-to-platformatic-serviceweekly0.5https://docs.platformatic.dev/docs/0.41.1/guides/monitoringweekly0.5https://docs.platformatic.dev/docs/0.41.1/guides/packaging-an-application-as-a-moduleweekly0.5https://docs.platformatic.dev/docs/0.41.1/guides/prismaweekly0.5https://docs.platformatic.dev/docs/0.41.1/guides/securing-platformatic-dbweekly0.5https://docs.platformatic.dev/docs/0.41.1/guides/seed-a-databaseweekly0.5https://docs.platformatic.dev/docs/0.41.1/guides/telemetryweekly0.5https://docs.platformatic.dev/docs/0.41.1/platformatic-cloud/deploy-database-neonweekly0.5https://docs.platformatic.dev/docs/0.41.1/platformatic-cloud/pricingweekly0.5https://docs.platformatic.dev/docs/0.41.1/platformatic-cloud/quick-start-guideweekly0.5https://docs.platformatic.dev/docs/0.41.1/reference/cliweekly0.5https://docs.platformatic.dev/docs/0.41.1/reference/client/frontendweekly0.5https://docs.platformatic.dev/docs/0.41.1/reference/client/introductionweekly0.5https://docs.platformatic.dev/docs/0.41.1/reference/client/programmaticweekly0.5https://docs.platformatic.dev/docs/0.41.1/reference/composer/api-modificationweekly0.5https://docs.platformatic.dev/docs/0.41.1/reference/composer/configurationweekly0.5https://docs.platformatic.dev/docs/0.41.1/reference/composer/introductionweekly0.5https://docs.platformatic.dev/docs/0.41.1/reference/composer/pluginweekly0.5https://docs.platformatic.dev/docs/0.41.1/reference/composer/programmaticweekly0.5https://docs.platformatic.dev/docs/0.41.1/reference/db/authorization/introdu
ctionweekly0.5https://docs.platformatic.dev/docs/0.41.1/reference/db/authorization/rulesweekly0.5https://docs.platformatic.dev/docs/0.41.1/reference/db/authorization/strategiesweekly0.5https://docs.platformatic.dev/docs/0.41.1/reference/db/authorization/user-roles-metadataweekly0.5https://docs.platformatic.dev/docs/0.41.1/reference/db/configurationweekly0.5https://docs.platformatic.dev/docs/0.41.1/reference/db/introductionweekly0.5https://docs.platformatic.dev/docs/0.41.1/reference/db/loggingweekly0.5https://docs.platformatic.dev/docs/0.41.1/reference/db/migrationsweekly0.5https://docs.platformatic.dev/docs/0.41.1/reference/db/pluginweekly0.5https://docs.platformatic.dev/docs/0.41.1/reference/db/programmaticweekly0.5https://docs.platformatic.dev/docs/0.41.1/reference/db/schema-supportweekly0.5https://docs.platformatic.dev/docs/0.41.1/reference/runtime/configurationweekly0.5https://docs.platformatic.dev/docs/0.41.1/reference/runtime/introductionweekly0.5https://docs.platformatic.dev/docs/0.41.1/reference/runtime/programmaticweekly0.5https://docs.platformatic.dev/docs/0.41.1/reference/service/configurationweekly0.5https://docs.platformatic.dev/docs/0.41.1/reference/service/introductionweekly0.5https://docs.platformatic.dev/docs/0.41.1/reference/service/pluginweekly0.5https://docs.platformatic.dev/docs/0.41.1/reference/service/programmaticweekly0.5https://docs.platformatic.dev/docs/0.41.1/reference/sql-events/fastify-pluginweekly0.5https://docs.platformatic.dev/docs/0.41.1/reference/sql-events/introductionweekly0.5https://docs.platformatic.dev/docs/0.41.1/reference/sql-graphql/ignoreweekly0.5https://docs.platformatic.dev/docs/0.41.1/reference/sql-graphql/introductionweekly0.5https://docs.platformatic.dev/docs/0.41.1/reference/sql-graphql/many-to-manyweekly0.5https://docs.platformatic.dev/docs/0.41.1/reference/sql-graphql/mutationsweekly0.5https://docs.platformatic.dev/docs/0.41.1/reference/sql-graphql/queriesweekly0.5https://docs.platformatic.dev/docs/0.41.1/reference/
sql-graphql/subscriptionsweekly0.5https://docs.platformatic.dev/docs/0.41.1/reference/sql-mapper/entities/apiweekly0.5https://docs.platformatic.dev/docs/0.41.1/reference/sql-mapper/entities/exampleweekly0.5https://docs.platformatic.dev/docs/0.41.1/reference/sql-mapper/entities/fieldsweekly0.5https://docs.platformatic.dev/docs/0.41.1/reference/sql-mapper/entities/hooksweekly0.5https://docs.platformatic.dev/docs/0.41.1/reference/sql-mapper/entities/introductionweekly0.5https://docs.platformatic.dev/docs/0.41.1/reference/sql-mapper/entities/relationsweekly0.5https://docs.platformatic.dev/docs/0.41.1/reference/sql-mapper/entities/timestampsweekly0.5https://docs.platformatic.dev/docs/0.41.1/reference/sql-mapper/entities/transactionsweekly0.5https://docs.platformatic.dev/docs/0.41.1/reference/sql-mapper/fastify-pluginweekly0.5https://docs.platformatic.dev/docs/0.41.1/reference/sql-mapper/introductionweekly0.5https://docs.platformatic.dev/docs/0.41.1/reference/sql-openapi/apiweekly0.5https://docs.platformatic.dev/docs/0.41.1/reference/sql-openapi/ignoreweekly0.5https://docs.platformatic.dev/docs/0.41.1/reference/sql-openapi/introductionweekly0.5https://docs.platformatic.dev/docs/0.41.2/category/getting-startedweekly0.5https://docs.platformatic.dev/docs/0.41.2/category/guidesweekly0.5https://docs.platformatic.dev/docs/0.41.2/category/packagesweekly0.5https://docs.platformatic.dev/docs/0.41.2/category/platformatic-cloudweekly0.5https://docs.platformatic.dev/docs/0.41.2/category/referenceweekly0.5https://docs.platformatic.dev/docs/0.41.2/contributing/weekly0.5https://docs.platformatic.dev/docs/0.41.2/contributing/documentation-style-guideweekly0.5https://docs.platformatic.dev/docs/0.41.2/getting-started/architectureweekly0.5https://docs.platformatic.dev/docs/0.41.2/getting-started/movie-quotes-app-tutorialweekly0.5https://docs.platformatic.dev/docs/0.41.2/getting-started/new-api-project-instructionsweekly0.5https://docs.platformatic.dev/docs/0.41.2/getting-started/quick-start
-guideweekly0.5https://docs.platformatic.dev/docs/0.41.2/guides/add-custom-functionality/extend-graphqlweekly0.5https://docs.platformatic.dev/docs/0.41.2/guides/add-custom-functionality/extend-restweekly0.5https://docs.platformatic.dev/docs/0.41.2/guides/add-custom-functionality/introductionweekly0.5https://docs.platformatic.dev/docs/0.41.2/guides/add-custom-functionality/prerequisitesweekly0.5https://docs.platformatic.dev/docs/0.41.2/guides/add-custom-functionality/raw-sqlweekly0.5https://docs.platformatic.dev/docs/0.41.2/guides/compiling-typescript-for-deploymentweekly0.5https://docs.platformatic.dev/docs/0.41.2/guides/debug-platformatic-dbweekly0.5https://docs.platformatic.dev/docs/0.41.2/guides/deploying-on-lambdaweekly0.5https://docs.platformatic.dev/docs/0.41.2/guides/deployment/weekly0.5https://docs.platformatic.dev/docs/0.41.2/guides/deployment/advanced-fly-io-deploymentweekly0.5https://docs.platformatic.dev/docs/0.41.2/guides/deployment/deploy-to-fly-io-with-sqliteweekly0.5https://docs.platformatic.dev/docs/0.41.2/guides/dockerize-platformatic-appweekly0.5https://docs.platformatic.dev/docs/0.41.2/guides/generate-frontend-code-to-consume-platformatic-rest-apiweekly0.5https://docs.platformatic.dev/docs/0.41.2/guides/jwt-auth0weekly0.5https://docs.platformatic.dev/docs/0.41.2/guides/migrating-express-app-to-platformatic-serviceweekly0.5https://docs.platformatic.dev/docs/0.41.2/guides/migrating-fastify-app-to-platformatic-serviceweekly0.5https://docs.platformatic.dev/docs/0.41.2/guides/monitoringweekly0.5https://docs.platformatic.dev/docs/0.41.2/guides/packaging-an-application-as-a-moduleweekly0.5https://docs.platformatic.dev/docs/0.41.2/guides/prismaweekly0.5https://docs.platformatic.dev/docs/0.41.2/guides/securing-platformatic-dbweekly0.5https://docs.platformatic.dev/docs/0.41.2/guides/seed-a-databaseweekly0.5https://docs.platformatic.dev/docs/0.41.2/guides/telemetryweekly0.5https://docs.platformatic.dev/docs/0.41.2/platformatic-cloud/deploy-database-neonweek
ly0.5https://docs.platformatic.dev/docs/0.41.2/platformatic-cloud/pricingweekly0.5https://docs.platformatic.dev/docs/0.41.2/platformatic-cloud/quick-start-guideweekly0.5https://docs.platformatic.dev/docs/0.41.2/reference/cliweekly0.5https://docs.platformatic.dev/docs/0.41.2/reference/client/frontendweekly0.5https://docs.platformatic.dev/docs/0.41.2/reference/client/introductionweekly0.5https://docs.platformatic.dev/docs/0.41.2/reference/client/programmaticweekly0.5https://docs.platformatic.dev/docs/0.41.2/reference/composer/api-modificationweekly0.5https://docs.platformatic.dev/docs/0.41.2/reference/composer/configurationweekly0.5https://docs.platformatic.dev/docs/0.41.2/reference/composer/introductionweekly0.5https://docs.platformatic.dev/docs/0.41.2/reference/composer/pluginweekly0.5https://docs.platformatic.dev/docs/0.41.2/reference/composer/programmaticweekly0.5https://docs.platformatic.dev/docs/0.41.2/reference/db/authorization/introductionweekly0.5https://docs.platformatic.dev/docs/0.41.2/reference/db/authorization/rulesweekly0.5https://docs.platformatic.dev/docs/0.41.2/reference/db/authorization/strategiesweekly0.5https://docs.platformatic.dev/docs/0.41.2/reference/db/authorization/user-roles-metadataweekly0.5https://docs.platformatic.dev/docs/0.41.2/reference/db/configurationweekly0.5https://docs.platformatic.dev/docs/0.41.2/reference/db/introductionweekly0.5https://docs.platformatic.dev/docs/0.41.2/reference/db/loggingweekly0.5https://docs.platformatic.dev/docs/0.41.2/reference/db/migrationsweekly0.5https://docs.platformatic.dev/docs/0.41.2/reference/db/pluginweekly0.5https://docs.platformatic.dev/docs/0.41.2/reference/db/programmaticweekly0.5https://docs.platformatic.dev/docs/0.41.2/reference/db/schema-supportweekly0.5https://docs.platformatic.dev/docs/0.41.2/reference/runtime/configurationweekly0.5https://docs.platformatic.dev/docs/0.41.2/reference/runtime/introductionweekly0.5https://docs.platformatic.dev/docs/0.41.2/reference/runtime/programmaticweekly0
.5https://docs.platformatic.dev/docs/0.41.2/reference/service/configurationweekly0.5https://docs.platformatic.dev/docs/0.41.2/reference/service/introductionweekly0.5https://docs.platformatic.dev/docs/0.41.2/reference/service/pluginweekly0.5https://docs.platformatic.dev/docs/0.41.2/reference/service/programmaticweekly0.5https://docs.platformatic.dev/docs/0.41.2/reference/sql-events/fastify-pluginweekly0.5https://docs.platformatic.dev/docs/0.41.2/reference/sql-events/introductionweekly0.5https://docs.platformatic.dev/docs/0.41.2/reference/sql-graphql/ignoreweekly0.5https://docs.platformatic.dev/docs/0.41.2/reference/sql-graphql/introductionweekly0.5https://docs.platformatic.dev/docs/0.41.2/reference/sql-graphql/many-to-manyweekly0.5https://docs.platformatic.dev/docs/0.41.2/reference/sql-graphql/mutationsweekly0.5https://docs.platformatic.dev/docs/0.41.2/reference/sql-graphql/queriesweekly0.5https://docs.platformatic.dev/docs/0.41.2/reference/sql-graphql/subscriptionsweekly0.5https://docs.platformatic.dev/docs/0.41.2/reference/sql-mapper/entities/apiweekly0.5https://docs.platformatic.dev/docs/0.41.2/reference/sql-mapper/entities/exampleweekly0.5https://docs.platformatic.dev/docs/0.41.2/reference/sql-mapper/entities/fieldsweekly0.5https://docs.platformatic.dev/docs/0.41.2/reference/sql-mapper/entities/hooksweekly0.5https://docs.platformatic.dev/docs/0.41.2/reference/sql-mapper/entities/introductionweekly0.5https://docs.platformatic.dev/docs/0.41.2/reference/sql-mapper/entities/relationsweekly0.5https://docs.platformatic.dev/docs/0.41.2/reference/sql-mapper/entities/timestampsweekly0.5https://docs.platformatic.dev/docs/0.41.2/reference/sql-mapper/entities/transactionsweekly0.5https://docs.platformatic.dev/docs/0.41.2/reference/sql-mapper/fastify-pluginweekly0.5https://docs.platformatic.dev/docs/0.41.2/reference/sql-mapper/introductionweekly0.5https://docs.platformatic.dev/docs/0.41.2/reference/sql-openapi/apiweekly0.5https://docs.platformatic.dev/docs/0.41.2/reference/sq
l-openapi/ignoreweekly0.5https://docs.platformatic.dev/docs/0.41.2/reference/sql-openapi/introductionweekly0.5https://docs.platformatic.dev/docs/0.41.3/category/getting-startedweekly0.5https://docs.platformatic.dev/docs/0.41.3/category/guidesweekly0.5https://docs.platformatic.dev/docs/0.41.3/category/packagesweekly0.5https://docs.platformatic.dev/docs/0.41.3/category/platformatic-cloudweekly0.5https://docs.platformatic.dev/docs/0.41.3/category/referenceweekly0.5https://docs.platformatic.dev/docs/0.41.3/contributing/weekly0.5https://docs.platformatic.dev/docs/0.41.3/contributing/documentation-style-guideweekly0.5https://docs.platformatic.dev/docs/0.41.3/getting-started/architectureweekly0.5https://docs.platformatic.dev/docs/0.41.3/getting-started/movie-quotes-app-tutorialweekly0.5https://docs.platformatic.dev/docs/0.41.3/getting-started/new-api-project-instructionsweekly0.5https://docs.platformatic.dev/docs/0.41.3/getting-started/quick-start-guideweekly0.5https://docs.platformatic.dev/docs/0.41.3/guides/add-custom-functionality/extend-graphqlweekly0.5https://docs.platformatic.dev/docs/0.41.3/guides/add-custom-functionality/extend-restweekly0.5https://docs.platformatic.dev/docs/0.41.3/guides/add-custom-functionality/introductionweekly0.5https://docs.platformatic.dev/docs/0.41.3/guides/add-custom-functionality/prerequisitesweekly0.5https://docs.platformatic.dev/docs/0.41.3/guides/add-custom-functionality/raw-sqlweekly0.5https://docs.platformatic.dev/docs/0.41.3/guides/compiling-typescript-for-deploymentweekly0.5https://docs.platformatic.dev/docs/0.41.3/guides/debug-platformatic-dbweekly0.5https://docs.platformatic.dev/docs/0.41.3/guides/deploying-on-lambdaweekly0.5https://docs.platformatic.dev/docs/0.41.3/guides/deployment/weekly0.5https://docs.platformatic.dev/docs/0.41.3/guides/deployment/advanced-fly-io-deploymentweekly0.5https://docs.platformatic.dev/docs/0.41.3/guides/deployment/deploy-to-fly-io-with-sqliteweekly0.5https://docs.platformatic.dev/docs/0.41.3/guides/
dockerize-platformatic-appweekly0.5https://docs.platformatic.dev/docs/0.41.3/guides/generate-frontend-code-to-consume-platformatic-rest-apiweekly0.5https://docs.platformatic.dev/docs/0.41.3/guides/jwt-auth0weekly0.5https://docs.platformatic.dev/docs/0.41.3/guides/migrating-express-app-to-platformatic-serviceweekly0.5https://docs.platformatic.dev/docs/0.41.3/guides/migrating-fastify-app-to-platformatic-serviceweekly0.5https://docs.platformatic.dev/docs/0.41.3/guides/monitoringweekly0.5https://docs.platformatic.dev/docs/0.41.3/guides/packaging-an-application-as-a-moduleweekly0.5https://docs.platformatic.dev/docs/0.41.3/guides/prismaweekly0.5https://docs.platformatic.dev/docs/0.41.3/guides/securing-platformatic-dbweekly0.5https://docs.platformatic.dev/docs/0.41.3/guides/seed-a-databaseweekly0.5https://docs.platformatic.dev/docs/0.41.3/guides/telemetryweekly0.5https://docs.platformatic.dev/docs/0.41.3/platformatic-cloud/deploy-database-neonweekly0.5https://docs.platformatic.dev/docs/0.41.3/platformatic-cloud/pricingweekly0.5https://docs.platformatic.dev/docs/0.41.3/platformatic-cloud/quick-start-guideweekly0.5https://docs.platformatic.dev/docs/0.41.3/reference/cliweekly0.5https://docs.platformatic.dev/docs/0.41.3/reference/client/frontendweekly0.5https://docs.platformatic.dev/docs/0.41.3/reference/client/introductionweekly0.5https://docs.platformatic.dev/docs/0.41.3/reference/client/programmaticweekly0.5https://docs.platformatic.dev/docs/0.41.3/reference/composer/api-modificationweekly0.5https://docs.platformatic.dev/docs/0.41.3/reference/composer/configurationweekly0.5https://docs.platformatic.dev/docs/0.41.3/reference/composer/introductionweekly0.5https://docs.platformatic.dev/docs/0.41.3/reference/composer/pluginweekly0.5https://docs.platformatic.dev/docs/0.41.3/reference/composer/programmaticweekly0.5https://docs.platformatic.dev/docs/0.41.3/reference/db/authorization/introductionweekly0.5https://docs.platformatic.dev/docs/0.41.3/reference/db/authorization/rulesweek
ly0.5https://docs.platformatic.dev/docs/0.41.3/reference/db/authorization/strategiesweekly0.5https://docs.platformatic.dev/docs/0.41.3/reference/db/authorization/user-roles-metadataweekly0.5https://docs.platformatic.dev/docs/0.41.3/reference/db/configurationweekly0.5https://docs.platformatic.dev/docs/0.41.3/reference/db/introductionweekly0.5https://docs.platformatic.dev/docs/0.41.3/reference/db/loggingweekly0.5https://docs.platformatic.dev/docs/0.41.3/reference/db/migrationsweekly0.5https://docs.platformatic.dev/docs/0.41.3/reference/db/pluginweekly0.5https://docs.platformatic.dev/docs/0.41.3/reference/db/programmaticweekly0.5https://docs.platformatic.dev/docs/0.41.3/reference/db/schema-supportweekly0.5https://docs.platformatic.dev/docs/0.41.3/reference/runtime/configurationweekly0.5https://docs.platformatic.dev/docs/0.41.3/reference/runtime/introductionweekly0.5https://docs.platformatic.dev/docs/0.41.3/reference/runtime/programmaticweekly0.5https://docs.platformatic.dev/docs/0.41.3/reference/service/configurationweekly0.5https://docs.platformatic.dev/docs/0.41.3/reference/service/introductionweekly0.5https://docs.platformatic.dev/docs/0.41.3/reference/service/pluginweekly0.5https://docs.platformatic.dev/docs/0.41.3/reference/service/programmaticweekly0.5https://docs.platformatic.dev/docs/0.41.3/reference/sql-events/fastify-pluginweekly0.5https://docs.platformatic.dev/docs/0.41.3/reference/sql-events/introductionweekly0.5https://docs.platformatic.dev/docs/0.41.3/reference/sql-graphql/ignoreweekly0.5https://docs.platformatic.dev/docs/0.41.3/reference/sql-graphql/introductionweekly0.5https://docs.platformatic.dev/docs/0.41.3/reference/sql-graphql/many-to-manyweekly0.5https://docs.platformatic.dev/docs/0.41.3/reference/sql-graphql/mutationsweekly0.5https://docs.platformatic.dev/docs/0.41.3/reference/sql-graphql/queriesweekly0.5https://docs.platformatic.dev/docs/0.41.3/reference/sql-graphql/subscriptionsweekly0.5https://docs.platformatic.dev/docs/0.41.3/reference/sql-ma
pper/entities/apiweekly0.5https://docs.platformatic.dev/docs/0.41.3/reference/sql-mapper/entities/exampleweekly0.5https://docs.platformatic.dev/docs/0.41.3/reference/sql-mapper/entities/fieldsweekly0.5https://docs.platformatic.dev/docs/0.41.3/reference/sql-mapper/entities/hooksweekly0.5https://docs.platformatic.dev/docs/0.41.3/reference/sql-mapper/entities/introductionweekly0.5https://docs.platformatic.dev/docs/0.41.3/reference/sql-mapper/entities/relationsweekly0.5https://docs.platformatic.dev/docs/0.41.3/reference/sql-mapper/entities/timestampsweekly0.5https://docs.platformatic.dev/docs/0.41.3/reference/sql-mapper/entities/transactionsweekly0.5https://docs.platformatic.dev/docs/0.41.3/reference/sql-mapper/fastify-pluginweekly0.5https://docs.platformatic.dev/docs/0.41.3/reference/sql-mapper/introductionweekly0.5https://docs.platformatic.dev/docs/0.41.3/reference/sql-openapi/apiweekly0.5https://docs.platformatic.dev/docs/0.41.3/reference/sql-openapi/ignoreweekly0.5https://docs.platformatic.dev/docs/0.41.3/reference/sql-openapi/introductionweekly0.5https://docs.platformatic.dev/docs/0.42.0/category/getting-startedweekly0.5https://docs.platformatic.dev/docs/0.42.0/category/guidesweekly0.5https://docs.platformatic.dev/docs/0.42.0/category/packagesweekly0.5https://docs.platformatic.dev/docs/0.42.0/category/platformatic-cloudweekly0.5https://docs.platformatic.dev/docs/0.42.0/category/referenceweekly0.5https://docs.platformatic.dev/docs/0.42.0/contributing/weekly0.5https://docs.platformatic.dev/docs/0.42.0/contributing/documentation-style-guideweekly0.5https://docs.platformatic.dev/docs/0.42.0/getting-started/architectureweekly0.5https://docs.platformatic.dev/docs/0.42.0/getting-started/movie-quotes-app-tutorialweekly0.5https://docs.platformatic.dev/docs/0.42.0/getting-started/new-api-project-instructionsweekly0.5https://docs.platformatic.dev/docs/0.42.0/getting-started/quick-start-guideweekly0.5https://docs.platformatic.dev/docs/0.42.0/guides/add-custom-functionality/ext
end-graphqlweekly0.5https://docs.platformatic.dev/docs/0.42.0/guides/add-custom-functionality/extend-restweekly0.5https://docs.platformatic.dev/docs/0.42.0/guides/add-custom-functionality/introductionweekly0.5https://docs.platformatic.dev/docs/0.42.0/guides/add-custom-functionality/prerequisitesweekly0.5https://docs.platformatic.dev/docs/0.42.0/guides/add-custom-functionality/raw-sqlweekly0.5https://docs.platformatic.dev/docs/0.42.0/guides/compiling-typescript-for-deploymentweekly0.5https://docs.platformatic.dev/docs/0.42.0/guides/debug-platformatic-dbweekly0.5https://docs.platformatic.dev/docs/0.42.0/guides/deploying-on-lambdaweekly0.5https://docs.platformatic.dev/docs/0.42.0/guides/deployment/weekly0.5https://docs.platformatic.dev/docs/0.42.0/guides/deployment/advanced-fly-io-deploymentweekly0.5https://docs.platformatic.dev/docs/0.42.0/guides/deployment/deploy-to-fly-io-with-sqliteweekly0.5https://docs.platformatic.dev/docs/0.42.0/guides/dockerize-platformatic-appweekly0.5https://docs.platformatic.dev/docs/0.42.0/guides/generate-frontend-code-to-consume-platformatic-rest-apiweekly0.5https://docs.platformatic.dev/docs/0.42.0/guides/jwt-auth0weekly0.5https://docs.platformatic.dev/docs/0.42.0/guides/migrating-express-app-to-platformatic-serviceweekly0.5https://docs.platformatic.dev/docs/0.42.0/guides/migrating-fastify-app-to-platformatic-serviceweekly0.5https://docs.platformatic.dev/docs/0.42.0/guides/monitoringweekly0.5https://docs.platformatic.dev/docs/0.42.0/guides/packaging-an-application-as-a-moduleweekly0.5https://docs.platformatic.dev/docs/0.42.0/guides/prismaweekly0.5https://docs.platformatic.dev/docs/0.42.0/guides/securing-platformatic-dbweekly0.5https://docs.platformatic.dev/docs/0.42.0/guides/seed-a-databaseweekly0.5https://docs.platformatic.dev/docs/0.42.0/guides/telemetryweekly0.5https://docs.platformatic.dev/docs/0.42.0/platformatic-cloud/deploy-database-neonweekly0.5https://docs.platformatic.dev/docs/0.42.0/platformatic-cloud/pricingweekly0.5https://do
cs.platformatic.dev/docs/0.42.0/platformatic-cloud/quick-start-guideweekly0.5https://docs.platformatic.dev/docs/0.42.0/reference/cliweekly0.5https://docs.platformatic.dev/docs/0.42.0/reference/client/frontendweekly0.5https://docs.platformatic.dev/docs/0.42.0/reference/client/introductionweekly0.5https://docs.platformatic.dev/docs/0.42.0/reference/client/programmaticweekly0.5https://docs.platformatic.dev/docs/0.42.0/reference/composer/api-modificationweekly0.5https://docs.platformatic.dev/docs/0.42.0/reference/composer/configurationweekly0.5https://docs.platformatic.dev/docs/0.42.0/reference/composer/introductionweekly0.5https://docs.platformatic.dev/docs/0.42.0/reference/composer/pluginweekly0.5https://docs.platformatic.dev/docs/0.42.0/reference/composer/programmaticweekly0.5https://docs.platformatic.dev/docs/0.42.0/reference/db/authorization/introductionweekly0.5https://docs.platformatic.dev/docs/0.42.0/reference/db/authorization/rulesweekly0.5https://docs.platformatic.dev/docs/0.42.0/reference/db/authorization/strategiesweekly0.5https://docs.platformatic.dev/docs/0.42.0/reference/db/authorization/user-roles-metadataweekly0.5https://docs.platformatic.dev/docs/0.42.0/reference/db/configurationweekly0.5https://docs.platformatic.dev/docs/0.42.0/reference/db/introductionweekly0.5https://docs.platformatic.dev/docs/0.42.0/reference/db/loggingweekly0.5https://docs.platformatic.dev/docs/0.42.0/reference/db/migrationsweekly0.5https://docs.platformatic.dev/docs/0.42.0/reference/db/pluginweekly0.5https://docs.platformatic.dev/docs/0.42.0/reference/db/programmaticweekly0.5https://docs.platformatic.dev/docs/0.42.0/reference/db/schema-supportweekly0.5https://docs.platformatic.dev/docs/0.42.0/reference/runtime/configurationweekly0.5https://docs.platformatic.dev/docs/0.42.0/reference/runtime/introductionweekly0.5https://docs.platformatic.dev/docs/0.42.0/reference/runtime/programmaticweekly0.5https://docs.platformatic.dev/docs/0.42.0/reference/service/configurationweekly0.5https://
docs.platformatic.dev/docs/0.42.0/reference/service/introductionweekly0.5https://docs.platformatic.dev/docs/0.42.0/reference/service/pluginweekly0.5https://docs.platformatic.dev/docs/0.42.0/reference/service/programmaticweekly0.5https://docs.platformatic.dev/docs/0.42.0/reference/sql-events/fastify-pluginweekly0.5https://docs.platformatic.dev/docs/0.42.0/reference/sql-events/introductionweekly0.5https://docs.platformatic.dev/docs/0.42.0/reference/sql-graphql/ignoreweekly0.5https://docs.platformatic.dev/docs/0.42.0/reference/sql-graphql/introductionweekly0.5https://docs.platformatic.dev/docs/0.42.0/reference/sql-graphql/many-to-manyweekly0.5https://docs.platformatic.dev/docs/0.42.0/reference/sql-graphql/mutationsweekly0.5https://docs.platformatic.dev/docs/0.42.0/reference/sql-graphql/queriesweekly0.5https://docs.platformatic.dev/docs/0.42.0/reference/sql-graphql/subscriptionsweekly0.5https://docs.platformatic.dev/docs/0.42.0/reference/sql-mapper/entities/apiweekly0.5https://docs.platformatic.dev/docs/0.42.0/reference/sql-mapper/entities/exampleweekly0.5https://docs.platformatic.dev/docs/0.42.0/reference/sql-mapper/entities/fieldsweekly0.5https://docs.platformatic.dev/docs/0.42.0/reference/sql-mapper/entities/hooksweekly0.5https://docs.platformatic.dev/docs/0.42.0/reference/sql-mapper/entities/introductionweekly0.5https://docs.platformatic.dev/docs/0.42.0/reference/sql-mapper/entities/relationsweekly0.5https://docs.platformatic.dev/docs/0.42.0/reference/sql-mapper/entities/timestampsweekly0.5https://docs.platformatic.dev/docs/0.42.0/reference/sql-mapper/entities/transactionsweekly0.5https://docs.platformatic.dev/docs/0.42.0/reference/sql-mapper/fastify-pluginweekly0.5https://docs.platformatic.dev/docs/0.42.0/reference/sql-mapper/introductionweekly0.5https://docs.platformatic.dev/docs/0.42.0/reference/sql-openapi/apiweekly0.5https://docs.platformatic.dev/docs/0.42.0/reference/sql-openapi/ignoreweekly0.5https://docs.platformatic.dev/docs/0.42.0/reference/sql-openapi/int
roductionweekly0.5https://docs.platformatic.dev/docs/next/category/getting-startedweekly0.5https://docs.platformatic.dev/docs/next/category/guidesweekly0.5https://docs.platformatic.dev/docs/next/category/packagesweekly0.5https://docs.platformatic.dev/docs/next/category/platformatic-cloudweekly0.5https://docs.platformatic.dev/docs/next/category/referenceweekly0.5https://docs.platformatic.dev/docs/next/contributing/weekly0.5https://docs.platformatic.dev/docs/next/contributing/documentation-style-guideweekly0.5https://docs.platformatic.dev/docs/next/getting-started/architectureweekly0.5https://docs.platformatic.dev/docs/next/getting-started/movie-quotes-app-tutorialweekly0.5https://docs.platformatic.dev/docs/next/getting-started/new-api-project-instructionsweekly0.5https://docs.platformatic.dev/docs/next/getting-started/quick-start-guideweekly0.5https://docs.platformatic.dev/docs/next/guides/add-custom-functionality/extend-graphqlweekly0.5https://docs.platformatic.dev/docs/next/guides/add-custom-functionality/extend-restweekly0.5https://docs.platformatic.dev/docs/next/guides/add-custom-functionality/introductionweekly0.5https://docs.platformatic.dev/docs/next/guides/add-custom-functionality/prerequisitesweekly0.5https://docs.platformatic.dev/docs/next/guides/add-custom-functionality/raw-sqlweekly0.5https://docs.platformatic.dev/docs/next/guides/compiling-typescript-for-deploymentweekly0.5https://docs.platformatic.dev/docs/next/guides/debug-platformatic-dbweekly0.5https://docs.platformatic.dev/docs/next/guides/deploying-on-lambdaweekly0.5https://docs.platformatic.dev/docs/next/guides/deployment/weekly0.5https://docs.platformatic.dev/docs/next/guides/deployment/advanced-fly-io-deploymentweekly0.5https://docs.platformatic.dev/docs/next/guides/deployment/deploy-to-fly-io-with-sqliteweekly0.5https://docs.platformatic.dev/docs/next/guides/dockerize-platformatic-appweekly0.5https://docs.platformatic.dev/docs/next/guides/generate-frontend-code-to-consume-platformatic-rest-apiw
eekly0.5https://docs.platformatic.dev/docs/next/guides/jwt-auth0weekly0.5https://docs.platformatic.dev/docs/next/guides/migrating-express-app-to-platformatic-serviceweekly0.5https://docs.platformatic.dev/docs/next/guides/migrating-fastify-app-to-platformatic-serviceweekly0.5https://docs.platformatic.dev/docs/next/guides/monitoringweekly0.5https://docs.platformatic.dev/docs/next/guides/packaging-an-application-as-a-moduleweekly0.5https://docs.platformatic.dev/docs/next/guides/prismaweekly0.5https://docs.platformatic.dev/docs/next/guides/securing-platformatic-dbweekly0.5https://docs.platformatic.dev/docs/next/guides/seed-a-databaseweekly0.5https://docs.platformatic.dev/docs/next/guides/telemetryweekly0.5https://docs.platformatic.dev/docs/next/platformatic-cloud/deploy-database-neonweekly0.5https://docs.platformatic.dev/docs/next/platformatic-cloud/pricingweekly0.5https://docs.platformatic.dev/docs/next/platformatic-cloud/quick-start-guideweekly0.5https://docs.platformatic.dev/docs/next/reference/cliweekly0.5https://docs.platformatic.dev/docs/next/reference/client/frontendweekly0.5https://docs.platformatic.dev/docs/next/reference/client/introductionweekly0.5https://docs.platformatic.dev/docs/next/reference/client/programmaticweekly0.5https://docs.platformatic.dev/docs/next/reference/composer/api-modificationweekly0.5https://docs.platformatic.dev/docs/next/reference/composer/configurationweekly0.5https://docs.platformatic.dev/docs/next/reference/composer/introductionweekly0.5https://docs.platformatic.dev/docs/next/reference/composer/pluginweekly0.5https://docs.platformatic.dev/docs/next/reference/composer/programmaticweekly0.5https://docs.platformatic.dev/docs/next/reference/db/authorization/introductionweekly0.5https://docs.platformatic.dev/docs/next/reference/db/authorization/rulesweekly0.5https://docs.platformatic.dev/docs/next/reference/db/authorization/strategiesweekly0.5https://docs.platformatic.dev/docs/next/reference/db/authorization/user-roles-metadataweekly0.5
https://docs.platformatic.dev/docs/next/reference/db/configurationweekly0.5https://docs.platformatic.dev/docs/next/reference/db/introductionweekly0.5https://docs.platformatic.dev/docs/next/reference/db/loggingweekly0.5https://docs.platformatic.dev/docs/next/reference/db/migrationsweekly0.5https://docs.platformatic.dev/docs/next/reference/db/pluginweekly0.5https://docs.platformatic.dev/docs/next/reference/db/programmaticweekly0.5https://docs.platformatic.dev/docs/next/reference/db/schema-supportweekly0.5https://docs.platformatic.dev/docs/next/reference/runtime/configurationweekly0.5https://docs.platformatic.dev/docs/next/reference/runtime/introductionweekly0.5https://docs.platformatic.dev/docs/next/reference/runtime/programmaticweekly0.5https://docs.platformatic.dev/docs/next/reference/service/configurationweekly0.5https://docs.platformatic.dev/docs/next/reference/service/introductionweekly0.5https://docs.platformatic.dev/docs/next/reference/service/pluginweekly0.5https://docs.platformatic.dev/docs/next/reference/service/programmaticweekly0.5https://docs.platformatic.dev/docs/next/reference/sql-events/fastify-pluginweekly0.5https://docs.platformatic.dev/docs/next/reference/sql-events/introductionweekly0.5https://docs.platformatic.dev/docs/next/reference/sql-graphql/ignoreweekly0.5https://docs.platformatic.dev/docs/next/reference/sql-graphql/introductionweekly0.5https://docs.platformatic.dev/docs/next/reference/sql-graphql/many-to-manyweekly0.5https://docs.platformatic.dev/docs/next/reference/sql-graphql/mutationsweekly0.5https://docs.platformatic.dev/docs/next/reference/sql-graphql/queriesweekly0.5https://docs.platformatic.dev/docs/next/reference/sql-graphql/subscriptionsweekly0.5https://docs.platformatic.dev/docs/next/reference/sql-mapper/entities/apiweekly0.5https://docs.platformatic.dev/docs/next/reference/sql-mapper/entities/exampleweekly0.5https://docs.platformatic.dev/docs/next/reference/sql-mapper/entities/fieldsweekly0.5https://docs.platformatic.dev/docs/next
/reference/sql-mapper/entities/hooksweekly0.5https://docs.platformatic.dev/docs/next/reference/sql-mapper/entities/introductionweekly0.5https://docs.platformatic.dev/docs/next/reference/sql-mapper/entities/relationsweekly0.5https://docs.platformatic.dev/docs/next/reference/sql-mapper/entities/timestampsweekly0.5https://docs.platformatic.dev/docs/next/reference/sql-mapper/entities/transactionsweekly0.5https://docs.platformatic.dev/docs/next/reference/sql-mapper/fastify-pluginweekly0.5https://docs.platformatic.dev/docs/next/reference/sql-mapper/introductionweekly0.5https://docs.platformatic.dev/docs/next/reference/sql-openapi/apiweekly0.5https://docs.platformatic.dev/docs/next/reference/sql-openapi/ignoreweekly0.5https://docs.platformatic.dev/docs/next/reference/sql-openapi/introductionweekly0.5https://docs.platformatic.dev/docs/category/getting-startedweekly0.5https://docs.platformatic.dev/docs/category/guidesweekly0.5https://docs.platformatic.dev/docs/category/packagesweekly0.5https://docs.platformatic.dev/docs/category/platformatic-cloudweekly0.5https://docs.platformatic.dev/docs/category/referenceweekly0.5https://docs.platformatic.dev/docs/contributing/weekly0.5https://docs.platformatic.dev/docs/contributing/documentation-style-guideweekly0.5https://docs.platformatic.dev/docs/getting-started/architectureweekly0.5https://docs.platformatic.dev/docs/getting-started/movie-quotes-app-tutorialweekly0.5https://docs.platformatic.dev/docs/getting-started/new-api-project-instructionsweekly0.5https://docs.platformatic.dev/docs/getting-started/quick-start-guideweekly0.5https://docs.platformatic.dev/docs/guides/add-custom-functionality/extend-graphqlweekly0.5https://docs.platformatic.dev/docs/guides/add-custom-functionality/extend-restweekly0.5https://docs.platformatic.dev/docs/guides/add-custom-functionality/introductionweekly0.5https://docs.platformatic.dev/docs/guides/add-custom-functionality/prerequisitesweekly0.5https://docs.platformatic.dev/docs/guides/add-custom-functio
nality/raw-sqlweekly0.5https://docs.platformatic.dev/docs/guides/compiling-typescript-for-deploymentweekly0.5https://docs.platformatic.dev/docs/guides/debug-platformatic-dbweekly0.5https://docs.platformatic.dev/docs/guides/deploying-on-lambdaweekly0.5https://docs.platformatic.dev/docs/guides/deployment/weekly0.5https://docs.platformatic.dev/docs/guides/deployment/advanced-fly-io-deploymentweekly0.5https://docs.platformatic.dev/docs/guides/deployment/deploy-to-fly-io-with-sqliteweekly0.5https://docs.platformatic.dev/docs/guides/dockerize-platformatic-appweekly0.5https://docs.platformatic.dev/docs/guides/generate-frontend-code-to-consume-platformatic-rest-apiweekly0.5https://docs.platformatic.dev/docs/guides/jwt-auth0weekly0.5https://docs.platformatic.dev/docs/guides/migrating-express-app-to-platformatic-serviceweekly0.5https://docs.platformatic.dev/docs/guides/migrating-fastify-app-to-platformatic-serviceweekly0.5https://docs.platformatic.dev/docs/guides/monitoringweekly0.5https://docs.platformatic.dev/docs/guides/packaging-an-application-as-a-moduleweekly0.5https://docs.platformatic.dev/docs/guides/prismaweekly0.5https://docs.platformatic.dev/docs/guides/securing-platformatic-dbweekly0.5https://docs.platformatic.dev/docs/guides/seed-a-databaseweekly0.5https://docs.platformatic.dev/docs/guides/telemetryweekly0.5https://docs.platformatic.dev/docs/platformatic-cloud/deploy-database-neonweekly0.5https://docs.platformatic.dev/docs/platformatic-cloud/pricingweekly0.5https://docs.platformatic.dev/docs/platformatic-cloud/quick-start-guideweekly0.5https://docs.platformatic.dev/docs/reference/cliweekly0.5https://docs.platformatic.dev/docs/reference/client/frontendweekly0.5https://docs.platformatic.dev/docs/reference/client/introductionweekly0.5https://docs.platformatic.dev/docs/reference/client/programmaticweekly0.5https://docs.platformatic.dev/docs/reference/composer/api-modificationweekly0.5https://docs.platformatic.dev/docs/reference/composer/configurationweekly0.5https://
docs.platformatic.dev/docs/reference/composer/introductionweekly0.5https://docs.platformatic.dev/docs/reference/composer/pluginweekly0.5https://docs.platformatic.dev/docs/reference/composer/programmaticweekly0.5https://docs.platformatic.dev/docs/reference/db/authorization/introductionweekly0.5https://docs.platformatic.dev/docs/reference/db/authorization/rulesweekly0.5https://docs.platformatic.dev/docs/reference/db/authorization/strategiesweekly0.5https://docs.platformatic.dev/docs/reference/db/authorization/user-roles-metadataweekly0.5https://docs.platformatic.dev/docs/reference/db/configurationweekly0.5https://docs.platformatic.dev/docs/reference/db/introductionweekly0.5https://docs.platformatic.dev/docs/reference/db/loggingweekly0.5https://docs.platformatic.dev/docs/reference/db/migrationsweekly0.5https://docs.platformatic.dev/docs/reference/db/pluginweekly0.5https://docs.platformatic.dev/docs/reference/db/programmaticweekly0.5https://docs.platformatic.dev/docs/reference/db/schema-supportweekly0.5https://docs.platformatic.dev/docs/reference/runtime/configurationweekly0.5https://docs.platformatic.dev/docs/reference/runtime/introductionweekly0.5https://docs.platformatic.dev/docs/reference/runtime/programmaticweekly0.5https://docs.platformatic.dev/docs/reference/service/configurationweekly0.5https://docs.platformatic.dev/docs/reference/service/introductionweekly0.5https://docs.platformatic.dev/docs/reference/service/pluginweekly0.5https://docs.platformatic.dev/docs/reference/service/programmaticweekly0.5https://docs.platformatic.dev/docs/reference/sql-events/fastify-pluginweekly0.5https://docs.platformatic.dev/docs/reference/sql-events/introductionweekly0.5https://docs.platformatic.dev/docs/reference/sql-graphql/ignoreweekly0.5https://docs.platformatic.dev/docs/reference/sql-graphql/introductionweekly0.5https://docs.platformatic.dev/docs/reference/sql-graphql/many-to-manyweekly0.5https://docs.platformatic.dev/docs/reference/sql-graphql/mutationsweekly0.5https://docs.
platformatic.dev/docs/reference/sql-graphql/queriesweekly0.5https://docs.platformatic.dev/docs/reference/sql-graphql/subscriptionsweekly0.5https://docs.platformatic.dev/docs/reference/sql-mapper/entities/apiweekly0.5https://docs.platformatic.dev/docs/reference/sql-mapper/entities/exampleweekly0.5https://docs.platformatic.dev/docs/reference/sql-mapper/entities/fieldsweekly0.5https://docs.platformatic.dev/docs/reference/sql-mapper/entities/hooksweekly0.5https://docs.platformatic.dev/docs/reference/sql-mapper/entities/introductionweekly0.5https://docs.platformatic.dev/docs/reference/sql-mapper/entities/relationsweekly0.5https://docs.platformatic.dev/docs/reference/sql-mapper/entities/timestampsweekly0.5https://docs.platformatic.dev/docs/reference/sql-mapper/entities/transactionsweekly0.5https://docs.platformatic.dev/docs/reference/sql-mapper/fastify-pluginweekly0.5https://docs.platformatic.dev/docs/reference/sql-mapper/introductionweekly0.5https://docs.platformatic.dev/docs/reference/sql-openapi/apiweekly0.5https://docs.platformatic.dev/docs/reference/sql-openapi/ignoreweekly0.5https://docs.platformatic.dev/docs/reference/sql-openapi/introductionweekly0.5https://docs.platformatic.dev/weekly0.5 \ No newline at end of file