astro first commit

This commit is contained in:
Niccolo Borgioli 2024-11-22 00:42:48 +01:00
parent 13eb767fa0
commit d4e9b2027e
156 changed files with 11589 additions and 0 deletions

3
.gitattributes vendored Normal file
View File

@ -0,0 +1,3 @@
src/images/* filter=lfs diff=lfs merge=lfs -text
*.afphoto filter=lfs diff=lfs merge=lfs -text
*.afdesign filter=lfs diff=lfs merge=lfs -text

24
.gitignore vendored Normal file
View File

@ -0,0 +1,24 @@
# build output
dist/
# generated types
.astro/
# dependencies
node_modules/
# logs
npm-debug.log*
yarn-debug.log*
yarn-error.log*
pnpm-debug.log*
# environment variables
.env
.env.production
# macOS-specific files
.DS_Store
# jetbrains setting folder
.idea/

4
.vscode/extensions.json vendored Normal file
View File

@ -0,0 +1,4 @@
{
"recommendations": ["astro-build.astro-vscode", "unifiedjs.vscode-mdx"],
"unwantedRecommendations": []
}

11
.vscode/launch.json vendored Normal file
View File

@ -0,0 +1,11 @@
{
"version": "0.2.0",
"configurations": [
{
"command": "./node_modules/.bin/astro dev",
"name": "Development server",
"request": "launch",
"type": "node-terminal"
}
]
}

68
README.md Normal file
View File

@ -0,0 +1,68 @@
# Astro Starter Kit: Blog
```sh
npm create astro@latest -- --template blog
```
[![Open in StackBlitz](https://developer.stackblitz.com/img/open_in_stackblitz.svg)](https://stackblitz.com/github/withastro/astro/tree/latest/examples/blog)
[![Open with CodeSandbox](https://assets.codesandbox.io/github/button-edit-lime.svg)](https://codesandbox.io/p/sandbox/github/withastro/astro/tree/latest/examples/blog)
[![Open in GitHub Codespaces](https://github.com/codespaces/badge.svg)](https://codespaces.new/withastro/astro?devcontainer_path=.devcontainer/blog/devcontainer.json)
> 🧑‍🚀 **Seasoned astronaut?** Delete this file. Have fun!
![blog](https://github.com/withastro/astro/assets/2244813/ff10799f-a816-4703-b967-c78997e8323d)
Features:
- ✅ Minimal styling (make it your own!)
- ✅ 100/100 Lighthouse performance
- ✅ SEO-friendly with canonical URLs and OpenGraph data
- ✅ Sitemap support
- ✅ RSS Feed support
- ✅ Markdown & MDX support
## 🚀 Project Structure
Inside of your Astro project, you'll see the following folders and files:
```text
├── public/
├── src/
│   ├── components/
│   ├── content/
│   ├── layouts/
│   └── pages/
├── astro.config.mjs
├── README.md
├── package.json
└── tsconfig.json
```
Astro looks for `.astro` or `.md` files in the `src/pages/` directory. Each page is exposed as a route based on its file name.
There's nothing special about `src/components/`, but that's where we like to put any Astro/React/Vue/Svelte/Preact components.
The `src/content/` directory contains "collections" of related Markdown and MDX documents. Use `getCollection()` to retrieve posts from `src/content/blog/`, and type-check your frontmatter using an optional schema. See [Astro's Content Collections docs](https://docs.astro.build/en/guides/content-collections/) to learn more.
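For example, a minimal sketch (assuming a `blog` collection is defined in `src/content/config.ts`) of fetching every post inside a page's frontmatter script:

```ts
// Hypothetical snippet, e.g. at the top of src/pages/blog/index.astro
import { getCollection } from 'astro:content'

// Entries are type-checked against the collection's schema
const posts = await getCollection('blog')
```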
Any static assets, like images, can be placed in the `public/` directory.
## 🧞 Commands
All commands are run from the root of the project, from a terminal:
| Command | Action |
| :------------------------ | :----------------------------------------------- |
| `npm install` | Installs dependencies |
| `npm run dev` | Starts local dev server at `localhost:4321` |
| `npm run build` | Build your production site to `./dist/` |
| `npm run preview` | Preview your build locally, before deploying |
| `npm run astro ...` | Run CLI commands like `astro add`, `astro check` |
| `npm run astro -- --help` | Get help using the Astro CLI |
## 👀 Want to learn more?
Check out [our documentation](https://docs.astro.build) or jump into our [Discord server](https://astro.build/chat).
## Credit
This theme is based off of the lovely [Bear Blog](https://github.com/HermanMartinus/bearblog/).

15
astro.config.mjs Normal file
View File

@ -0,0 +1,15 @@
// @ts-check
import { defineConfig } from 'astro/config'
import mdx from '@astrojs/mdx'
import sitemap from '@astrojs/sitemap'
import { remarkReadingTime } from './readingTime'
// https://astro.build/config
export default defineConfig({
site: 'https://example.com',
integrations: [mdx(), sitemap()],
markdown: {
remarkPlugins: [remarkReadingTime],
},
})

25
package.json Normal file
View File

@ -0,0 +1,25 @@
{
"private": true,
"type": "module",
"scripts": {
"astro": "astro",
"build": "astro check && astro build",
"dev": "astro dev",
"preview": "astro preview",
"start": "astro dev"
},
"dependencies": {
"@astrojs/check": "^0.9.4",
"@astrojs/mdx": "^3.1.9",
"@astrojs/rss": "^4.0.9",
"@astrojs/sitemap": "^3.2.1",
"@fontsource-variable/jost": "^5.1.1",
"@fontsource-variable/playfair-display": "^5.1.0",
"astro": "^4.16.13",
"mdast-util-to-string": "^4.0.0",
"reading-time": "^1.5.0",
"sharp": "^0.33.5",
"typescript": "^5.6.3"
},
"packageManager": "pnpm@9.12.3"
}

4557
pnpm-lock.yaml generated Normal file

File diff suppressed because it is too large.

9
public/favicon.svg Normal file
View File

@ -0,0 +1,9 @@
<svg xmlns="http://www.w3.org/2000/svg" fill="none" viewBox="0 0 128 128">
<path d="M50.4 78.5a75.1 75.1 0 0 0-28.5 6.9l24.2-65.7c.7-2 1.9-3.2 3.4-3.2h29c1.5 0 2.7 1.2 3.4 3.2l24.2 65.7s-11.6-7-28.5-7L67 45.5c-.4-1.7-1.6-2.8-2.9-2.8-1.3 0-2.5 1.1-2.9 2.7L50.4 78.5Zm-1.1 28.2Zm-4.2-20.2c-2 6.6-.6 15.8 4.2 20.2a17.5 17.5 0 0 1 .2-.7 5.5 5.5 0 0 1 5.7-4.5c2.8.1 4.3 1.5 4.7 4.7.2 1.1.2 2.3.2 3.5v.4c0 2.7.7 5.2 2.2 7.4a13 13 0 0 0 5.7 4.9v-.3l-.2-.3c-1.8-5.6-.5-9.5 4.4-12.8l1.5-1a73 73 0 0 0 3.2-2.2 16 16 0 0 0 6.8-11.4c.3-2 .1-4-.6-6l-.8.6-1.6 1a37 37 0 0 1-22.4 2.7c-5-.7-9.7-2-13.2-6.2Z" />
<style>
path { fill: #000; }
@media (prefers-color-scheme: dark) {
path { fill: #FFF; }
}
</style>
</svg>


10
readingTime.js Normal file
View File

@ -0,0 +1,10 @@
import getReadingTime from 'reading-time'
import { toString } from 'mdast-util-to-string'
export function remarkReadingTime() {
return function (tree, { data }) {
const textOnPage = toString(tree)
const readingTime = getReadingTime(textOnPage)
data.astro.frontmatter.readingTime = readingTime
}
}

View File

@ -0,0 +1,30 @@
---
import { Image } from 'astro:assets'
import aboutImage from '../content/images/about.webp'
---
<Image src={aboutImage} alt={'tiny me'} />
<style>
img {
position: absolute;
z-index: -1;
object-fit: contain;
width: 24vw;
height: 30vw;
left: 40em;
top: 12em;
max-width: 25em;
}
@media (max-width: 60em) {
img {
position: initial;
width: 100%;
height: 100%;
object-position: right;
max-height: 20em;
margin-top: 4em;
}
}
</style>

View File

@ -0,0 +1,46 @@
---
import '@fontsource-variable/jost'
import '@fontsource-variable/playfair-display'
import '../styles/preflight.css'
import '../styles/global.css'
interface Props {
image?: string
}
const canonicalURL = new URL(Astro.url.pathname, Astro.site)
const { image = '/blog-placeholder-1.jpg' } = Astro.props
const title = 'Astro Blog'
const description = 'Welcome to my website!'
---
<!-- Global Metadata -->
<meta charset="utf-8" />
<meta name="viewport" content="width=device-width,initial-scale=1" />
<link rel="icon" type="image/svg+xml" href="/favicon.svg" />
<meta name="generator" content={Astro.generator} />
<!-- Canonical URL -->
<link rel="canonical" href={canonicalURL} />
<!-- Primary Meta Tags -->
<title>{title}</title>
<meta name="title" content={title} />
<meta name="description" content={description} />
<!-- Open Graph / Facebook -->
<meta property="og:type" content="website" />
<meta property="og:url" content={Astro.url} />
<meta property="og:title" content={title} />
<meta property="og:description" content={description} />
<meta property="og:image" content={new URL(image, Astro.url)} />
<!-- Twitter -->
<meta property="twitter:card" content="summary_large_image" />
<meta property="twitter:url" content={Astro.url} />
<meta property="twitter:title" content={title} />
<meta property="twitter:description" content={description} />
<meta property="twitter:image" content={new URL(image, Astro.url)} />

View File

@ -0,0 +1,17 @@
---
interface Props {
date: Date;
}
const { date } = Astro.props;
---
<time datetime={date.toISOString()}>
{
date.toLocaleDateString('en-us', {
year: 'numeric',
month: 'short',
day: 'numeric',
})
}
</time>

127
src/components/Nav.astro Normal file
View File

@ -0,0 +1,127 @@
---
const { pathname } = Astro.url
const routes = [
{ name: 'About', href: '/about' },
{ name: 'Projects', href: '/projects' },
{ name: 'Blog', href: '/blog' },
{ name: 'Contact', href: '/contact' },
]
---
<nav>
<a href="/">
<h1 class:list={{ active: pathname === '/' }}>NB</h1>
</a>
<ul>
<!-- <li>
<a href="/search">
<Icon icon="search-outline" />
</a>
</li> -->
{
routes.map(({ href, name }) => (
<li>
<a {href}>
<span>{name}</span>
<div class:list={{ active: pathname.startsWith(href) }} />
</a>
</li>
))
}
</ul>
</nav>
<style>
nav {
position: fixed;
top: 0;
bottom: 0;
left: 0;
}
/* OLD */
nav :global(*) {
box-sizing: initial;
}
nav {
padding-top: env(safe-area-inset-top);
padding-bottom: env(safe-area-inset-bottom);
width: 3em;
height: 100%;
background-color: var(--clr-primary);
display: flex;
flex-direction: column;
justify-content: space-between;
align-items: center;
border-right: 0.1em solid var(--clr-secondary);
}
ul {
list-style: none;
margin: 0;
padding: 0;
max-height: 100%;
overflow: auto;
}
a {
writing-mode: vertical-rl;
padding: 1em;
text-decoration: none;
}
li a {
line-height: 1em;
width: 1em;
position: relative;
}
li a span {
z-index: 5;
position: relative;
}
li a div {
z-index: 4;
width: 0.125em;
height: 100%;
top: 0;
left: 1.12em;
position: absolute;
transition: all 500ms ease;
}
li a div.active {
background-color: var(--clr-secondary);
}
li:hover a div:not(.active) {
background-color: var(--clr-light);
}
h1 {
margin: 0;
writing-mode: horizontal-tb;
letter-spacing: -0.15em;
width: 1.15em;
font-size: 1.5em;
}
h1.active {
box-shadow: 0 0.1em var(--clr-secondary);
}
@media (max-width: 30em) {
nav {
width: 2.5em;
}
a {
padding: 0.5em;
}
li a div {
transform: translateX(-0.5em);
}
}
</style>

View File

@ -0,0 +1,39 @@
---
import type { CollectionEntry } from 'astro:content'
import FormattedDate from './FormattedDate.astro'
export type Props = {
post: CollectionEntry<'blog'>
full?: boolean
}
const { post, full = false } = Astro.props
const { remarkPluginFrontmatter } = await post.render()
---
<div class="attributes">
<div>
<FormattedDate date={post.data.date} />
{
full && post.data.updatedDate && post.data.date !== post.data.updatedDate && (
<>
<br />
<small>
Last update: <FormattedDate date={post.data.updatedDate} />
</small>
</>
)
}
</div>
<div>~ {remarkPluginFrontmatter.readingTime.minutes.toFixed(0)} min</div>
</div>
<style>
.attributes {
display: flex;
justify-content: space-between;
font-weight: 400;
margin-top: -0.125em;
}
</style>

View File

@ -0,0 +1,37 @@
---
import type { CollectionEntry } from 'astro:content'
import PostPreview from './PostPreview.astro'
export type Props = {
posts: CollectionEntry<'blog'>[]
}
const { posts } = Astro.props
---
<section>
<ul>
{
posts.map((post) => (
<li>
<a href={`/blog/${post.slug}`}>
<PostPreview {post} />
</a>
</li>
))
}
</ul>
</section>
<style>
section {
padding: 3rem;
}
ul {
max-width: 40rem;
display: flex;
flex-direction: column;
gap: 4rem;
}
</style>

View File

@ -0,0 +1,76 @@
---
import type { CollectionEntry } from 'astro:content'
import Tags from './Tags.astro'
import PostAttributes from './PostAttributes.astro'
import { Image } from 'astro:assets'
export type Props = {
post: CollectionEntry<'blog'>
}
const { post } = Astro.props
---
<section class:list={{ without: !post.data.coverImage }}>
{post.data.coverImage && <Image src={post.data.coverImage} alt={'foo'} />}
<PostAttributes {post} />
<h2>
{post.data.title}
</h2>
<Tags tags={post.data.tags.map((tag) => ({ count: 1, name: tag, href: `/tag/${tag}` }))} />
</section>
<style>
section {
display: block;
}
section > :global(img) {
height: 12em;
}
h2 {
margin-top: 0.25em;
position: relative;
top: 0;
transition: var(--animation);
background-color: var(--clr-light);
}
section :global(img) {
transition: var(--animation);
position: relative;
top: 0;
}
section:hover :global(img) {
top: 2.5rem;
}
section > :global(div) {
opacity: 1;
transition: var(--animation);
}
section:hover > :global(div) {
opacity: 0;
}
section.without {
border: 2px solid var(--clr-primary);
padding: 5%;
width: calc(100% + 10%);
transform: translateX(-5%);
}
img {
width: calc(100% - 0.25em);
object-fit: cover;
object-position: center;
border: 0.125em solid var(--clr-primary);
transition: var(--animation);
transform: scale(1);
margin: 0;
}
img {
transform: scale(1.1);
margin: 1em 0;
}
</style>

View File

@ -0,0 +1,30 @@
---
import type { HTMLAttributes } from 'astro/types'
type Props = HTMLAttributes<'svg'>
---
<svg
{...Astro.props}
width="100%"
height="100%"
viewBox="0 0 1500 650"
version="1.1"
xmlns="http://www.w3.org/2000/svg"
xmlns:xlink="http://www.w3.org/1999/xlink"
xml:space="preserve"
style="fill-rule:evenodd;clip-rule:evenodd;stroke-linejoin:round;stroke-miterlimit:2;"
>
<g>
<path
id="Niccolo-Outline"
d="M20.271,459.439c-1.17,-3.419 3.175,-9.667 10.262,-9.656c47.47,-41.735 98.965,-93.306 153.09,-151.305l141.37,-151.839l46.857,-57.73c4.246,-5.313 12.634,-0.233 7.597,7.121c-24.452,33.091 -49.57,66.559 -72.909,99.021c-4.255,7.012 -28.702,40.073 -54.403,74.754c-31.401,44.917 -56.137,87.345 -72.031,126.472c-5.226,12.173 -10.453,24.346 -15.679,36.52c-18.953,41.143 -39.007,81.367 -59.994,120.815c42.202,-42.593 70.867,-72.932 102.804,-107.146c28.316,-30.334 61.042,-62.614 73.907,-73.637c10.762,-9.09 17.153,-13.711 20.76,-16.467c5.612,-4.288 13.108,-2.443 10.155,8.052c-3.103,9.01 -7.394,18.956 -13.682,30.475c-4.402,8.289 -8.249,16.961 -11.459,24.83c-4.646,11.389 -7.855,20.391 -7.923,25.916c14.003,-8.284 31.688,-21.532 51.119,-36.542c13.038,-8.455 26.163,-16.951 39.441,-25.517c12.498,-8.738 23.006,-15.921 36.217,-21.442c7.292,-2.43 13.246,1.576 10.104,8.202l-8.089,13.678c-6.343,7.45 -13.359,34.46 -11.387,34.695c3.534,0.777 7.149,0.17 10.794,-0.933c23.784,-10.493 45.573,-21.385 69.025,-34.661c6.369,-3.605 11.37,1.451 6.254,5.825c-16.222,12.959 -28.925,27.403 -34.865,44.701c8.792,0.261 20.532,-8.963 33.808,-16.713c8.014,-4.576 16.028,-9.154 20.885,-8.785c11.911,-10.996 25.507,-24.107 43.452,-42.675c13.197,-30.813 29.84,-58.163 48.859,-83.127c17.47,-25.723 37.654,-48.609 58.99,-70.29c22.847,-26.175 44.455,-44.727 71.493,-52.443c7.445,-2.124 16.358,3.673 17.086,14.754c0.351,4.574 0.445,8.416 0.463,12.046c-4.465,23.076 -37.053,52.117 -68.637,80.944c-40.543,34.913 -80.295,70.231 -118.776,106.201c-8.172,17.541 -14.392,35.163 -18.91,52.857c-0.517,3.438 -0.96,6.889 -0.507,10.481c1.697,1.017 5.468,0.45 9.67,-0.445c11.461,-4.152 26.727,-13.293 43.617,-24.563c4.248,-15.98 9.857,-31.063 16.469,-45.483c16.602,-29.693 44.252,-62.684 61.948,-68.886c11.646,-4.081 20.539,3.744 18.176,16.41c-5.323,22.443 -36.961,61.86 -79.496,102.518c3.018,1.49 5.795,0.679 9.36,-1.235c2.52,-0.936 3.644,0.943 2.567,2.551c-1.569,2.342 -2.793,4.242 -3.853,6.808c-4.713,11.41 -19.341,14.129 -25.345,4.372c-15.438,10.943 -30.287,20.032 -42.639,21.265c-13.619,2.275 -19.672,-1.519 -22.759,-7.695c-4.459,-8.734 -0.458,-24.537 8.084,-44.134c-10.253,10.639 -21.336,20.469 -33.584,29.164c-4.302,3.574 -8.532,3.089 -11.05,-1.904c-16.541,11.137 -30.907,20.415 -39.674,19.79c-14.821,-1.058 -15.256,-12.14 0.849,-34.251c-20.141,11.132 -35.112,21.67 -50.324,21.67c-12.367,-0 -16.07,-5.667 -16.095,-15.883c-0.026,-11.108 12.042,-28.307 25.69,-46.49c-6.766,1.251 -29.36,16.484 -36.856,22.894c-31.276,26.743 -56.376,48.181 -80.374,61.851c-28.635,14.765 -30.493,-3.213 -23.688,-22c5.204,-14.365 5.708,-14.079 16.705,-34.027c3.395,-6.157 12.513,-19.838 18.955,-32.346c-21.194,16.195 -63.494,61.438 -108.128,109.894c-54.127,64.575 -96.284,112.081 -119.345,130.945c-8.391,6.863 -21.32,0.204 -14.954,-12.912c30.64,-55.549 61.083,-111.714 91.044,-169.382c10.592,-24.866 23.464,-49.607 38.413,-74.233c15.015,-28.578 59.12,-90.218 110.565,-160.202l-112.922,123.021c-36.339,42.043 -74.641,81.815 -114.295,120.022l-34.998,26.716c-7.575,5.788 -15.046,5.237 -17.274,-1.272Zm584.135,-59.379c2.547,-13.05 7.342,-25.449 13.647,-37.411c13.317,-22.914 32.664,-47.218 46.624,-56.324c10.853,-5.711 15.959,-1.884 14.306,5.387c-14.859,30.917 -42.775,59.263 -74.577,88.348Zm-36.287,-54.812c18.429,-40.636 47.954,-79.719 82.452,-118.106c21.615,-24.592 43.824,-45.573 67.534,-57.425c8.454,-3.531 14.15,-3.27 15.663,2.74c1.208,5.511 1.087,12.919 -0.025,15.884c-10.895,29.06 -98.099,97.654 -165.624,156.907Z"
></path>
<path
d="M790.827,340.284c14.131,-11.65 22.862,-18.483 26.192,-20.401c16.902,-8.922 34.857,-17.829 54.346,-26.712c41.304,-55.97 93.802,-108.542 154.102,-158.745c64.172,-53.215 105.987,-85.713 113.832,-89.283c6.351,-4.057 10.29,-3.836 9.835,2.373c0.537,5.883 -5.707,13.962 -14.222,22.776c-13.989,18.609 -69.137,70.564 -143.638,138.199c-16.254,14.232 -30.536,27.337 -42.56,39.151c-8.867,10.618 -18.587,21.162 -29.312,31.616c29.784,1.071 28.6,23.154 -0.625,46.65c-25.027,24.157 -54.137,45.113 -90.469,60.405c-28.265,41.837 -49.15,77.302 -60.555,104.586c-1.328,3.177 -7.815,18.804 -9.47,27.943c-4.9,27.063 10.015,23.06 21.109,17.971c16.709,-7.664 47.351,-35.697 78.978,-69.143c16.312,-20.342 32.626,-40.673 49.286,-59.531l35.791,-41.013c10.68,-10.696 15.887,-9.64 14.867,4.787l0.595,44.037c0.101,2.758 1.698,4.03 5.102,3.509c14.976,-4.162 29.386,-10.875 43.334,-19.664c3.561,-13.613 14.633,-25.34 29.175,-36.196l22.441,-14.931c18.8,-13.476 37.413,-21.894 55.74,-22.538c6.642,0.777 8.001,4.197 2.512,11.04c-8.132,9.079 -20.747,18.875 -34.694,28.883c-12.008,9.381 -32.042,22.63 -60.721,40.05c-4.734,3.027 -4.639,5.159 -0.543,6.55c30.233,-0.882 54.09,-16.005 77.861,-31.32l78.336,-53.796c6.813,-4.532 7.774,-2.56 4.78,4.683l-13.326,10.376c-11.237,9.236 -21.582,23.435 -30.556,45.247c19.972,-22.566 41.888,-43.948 65.1,-64.539c36.527,-31.836 66.455,-57.911 85.428,-72.972l49.659,-114.434c30.151,-70.206 63.115,-118.463 101.962,-120.814c10.2,-0.617 22.043,5.413 22.793,18.674c0.675,11.94 -9.22,36.621 -20.016,54.793c-17.775,39.721 -72.206,101.77 -143.423,162.579c-18.125,44.481 -37.315,87.848 -57.208,130.48c15.957,-15.122 32.068,-29.228 48.482,-41.326c16.178,-12.339 25.42,-11.829 23.443,9.463c-1.374,12.137 -3.975,23.706 -8.242,34.505c17.261,-20.861 34.969,-39.56 53.598,-53.805c22.833,-19.216 39.045,-22.42 43.464,2.893l0.687,19.386c0.61,3.885 2.56,3.346 4.64,0.295c3.65,-2.837 4.84,-1.296 3.571,3.234l-4.48,4.436c-8.095,6.965 -12.46,3.205 -13.005,-11.534l-2.491,-21.154c-1.277,-5.355 -4.898,-5.045 -9.637,-2.035c-10.262,6.903 -19.7,14.708 -28.47,23.249c-20.622,21.031 -39.722,43.404 -57.37,67.058c-6.958,7.296 -17.433,2.826 -13.117,-9.237c8.703,-18.489 16.414,-38.868 23.309,-60.801c0.513,-1.942 0.625,-3.57 0.555,-5.055c-0.084,-2.158 -1.304,-2.818 -3.997,-1.536c-10.007,4.648 -38.464,30.349 -69.307,58.775c-18.018,39.51 -42.493,81.084 -61.361,112.817c-15.895,26.735 -32.517,51.063 -51.844,77.582c-11.06,15.967 -24.357,29.563 -39.492,41.212c-14.64,9.765 -23.656,4.567 -23.983,-11.631c-0.227,-11.224 12.271,-31.246 29.841,-55.471l16.19,-27.055c37.737,-54.692 75.307,-98.044 112.733,-131.562l64.645,-136.834c-44.996,39.012 -86.331,78.465 -123.784,118.386c-10.374,15.05 -19.756,21.651 -27.489,14.2c-8.665,-7.056 3.682,-22.95 15.182,-38.487c-27.042,20.746 -52.866,39.881 -70.454,48.128c-18.019,9.664 -35.365,16.293 -51.281,16.483c-8.823,0.488 -14.584,-2.983 -17.154,-10.58c-16.049,9.662 -31.681,18.226 -44.512,19.401c-10.209,-0.192 -15.881,-3.907 -15.058,-12.664c1.616,-12.544 4.133,-25.363 7.168,-38.34c0.426,-5.95 0.285,-11.97 -0.325,-18.048c-4.409,1.154 -32.557,35.243 -62.642,72.018c-16.158,24.562 -42.094,53.85 -77.866,87.891c-29.099,26.823 -50.488,34.159 -63.639,23.277c-9.388,-7.768 -9.983,-23.902 -4.601,-44.68c14.14,-41.508 34.508,-77.887 57.319,-112.253l-15.554,0.957c-13.166,2.136 -21.228,-7.217 -20.125,-19.359c0.471,-5.185 5.926,-4.163 6.19,0.805l0.056,4.409c0.144,1.466 0.961,2.324 3.322,1.79c15.214,-2.886 27.175,-5.595 39.417,-8.32l11.731,-16.82c-12.537,7.756 -26.583,12.915 -43.06,13.89c-14.901,-0.493 -9.189,-15.08 
6.979,-29.279Zm414.48,86.48c-22.256,40.512 -48.181,82.189 -76.975,126.343c-27.236,35.969 -44.649,56.694 -53.771,62.226c-0.12,-7.817 9.581,-24.513 23.178,-44.73c8.821,-14.641 16.901,-28.071 23.705,-39.421c40.639,-59.455 66.613,-90.413 83.863,-104.418Zm-178.28,-38.987c14.175,-9.07 28.957,-18.853 44.552,-29.591c19.866,-10.959 36.232,-20.597 40.815,-25.795c-27.737,2.819 -70.826,31.038 -85.367,55.386Zm-178.283,-30.729c31.942,-20.031 53.3,-35.788 65.798,-47.965c5.138,-4.37 9.49,-9.322 13.125,-14.804c-6.643,-1.412 -14.627,-1.157 -23.54,0.252c-9.2,9.223 -20.581,19.463 -33.337,30.345c-3.543,4.971 -11.183,15.301 -22.046,32.172Zm-58.64,-1.128c14.859,-1.459 34.671,-14.198 55.997,-28.34c3.655,-4.48 7.289,-8.929 9.624,-13.347c-9.978,3.571 -21.859,9.534 -34.242,16.306c-21.104,15.919 -32.258,23.973 -31.379,25.381Zm115.973,-78.403l33.33,-34.097c20.314,-19.226 41.291,-37.458 63.022,-54.56c31.836,-27.817 62.044,-54.745 89.268,-80.044c24.823,-25.326 53.39,-53.527 53.343,-59.749c-7.426,4.73 -16.767,13.03 -25.831,20.812c-21.766,19.158 -44.251,38.317 -66.962,57.475c-22.656,20.764 -45.415,42.327 -68.292,64.824c-33.48,32.148 -58.19,59.778 -77.878,85.339Zm401.363,-43.122c16.971,-45.011 35.077,-89.057 54.716,-131.801c17.33,-34.503 37.252,-62.015 61.735,-77.223c10.166,-5.541 19.875,-7.148 28.715,-1.277c6.635,6.635 4.298,14.28 2.519,21.567c-8.122,26.528 -25.987,56.266 -44.164,81.113c-25.536,34.905 -65.89,75.934 -103.521,107.621Z"
></path>
<path
d="M1446.02,213.684c-2.16,-9.662 1.698,-13.133 10.952,-10.452c3.849,3.585 8.747,6.193 13.678,6.007c9.177,2.961 12.191,9.881 6.072,15.938c0.637,5.115 -5.038,5.379 -6.199,1.113c-13.591,0.532 -21.806,-3.846 -24.503,-12.606Z"
></path>
</g>
</svg>

View File

@ -0,0 +1,39 @@
---
export type Props = {
progress: number
title: string
}
const { progress, title } = Astro.props
---
<div class="progress">
<span>{title}</span>
<div style={`width: ${progress * 100}%`}></div>
</div>
<style>
.progress {
box-sizing: border-box;
width: 100%;
position: relative;
background: var(--clr-light);
margin: 0.5em 0;
padding: 0.1em 0.5em;
border: 1px solid var(--clr-dark);
}
.progress span {
position: relative;
z-index: 1;
}
.progress div {
height: 100%;
background: var(--clr-primary);
position: absolute;
top: 0;
left: 0;
z-index: 0;
}
</style>

View File

@ -0,0 +1,38 @@
---
export type Props = {
letters: string
even?: boolean
readable?: boolean
}
const { letters, readable = false, even = false } = Astro.props
---
<div class:list={{ even, readable }}>
{even ? [...letters].map((letter) => <span>{letter}</span>) : letters}
</div>
<style>
span {
width: 1em;
text-align: center;
display: inline-block;
}
div {
font-size: min(8vw, 5em);
text-transform: uppercase;
user-select: none;
letter-spacing: 0.35em;
}
div.even {
font-size: 8vw;
}
div.readable {
letter-spacing: initial;
text-transform: initial;
font-size: 2.25rem;
}
</style>

33
src/components/Tag.astro Normal file
View File

@ -0,0 +1,33 @@
---
export type Props = {
name: string
href: string
count: number
}
const { name, href, count } = Astro.props
---
<a {href}>
<div>
{name}
<i>{count}</i>
</div>
</a>
<style>
div {
display: inline-block;
margin: 0.25rem;
padding: 0 0.5rem;
background-color: var(--clr-primary);
width: max-content;
}
i {
font-family: 'Courier New', Courier, monospace;
font-size: 0.85em;
font-weight: bold;
color: var(--clr-secondary);
}
</style>

36
src/components/Tags.astro Normal file
View File

@ -0,0 +1,36 @@
---
import type { ComponentProps } from 'astro/types'
import Tag from './Tag.astro'
export type Props = {
tags: ComponentProps<typeof Tag>[]
rows?: number
}
const { tags, rows = 1 } = Astro.props
const height = rows * 2
---
<div style={`height: ${height}em;`}>
{tags.map((tag) => <Tag {...tag} />)}
</div>
<style>
div {
display: flex;
flex-direction: row;
overflow: auto;
align-items: center;
justify-content: flex-start;
margin: 0 -0.25rem;
}
div::-webkit-scrollbar {
display: none;
}
div {
-ms-overflow-style: none;
scrollbar-width: none;
}
</style>

5
src/consts.ts Normal file
View File

@ -0,0 +1,5 @@
// Place any global data in this file.
// You can import this data from anywhere in your site by using the `import` keyword.
export const SITE_TITLE = 'Astro Blog';
export const SITE_DESCRIPTION = 'Welcome to my website!';

View File

@ -0,0 +1,232 @@
---
title: '5 useful Typescript tricks'
date: '2019-10-06'
categories:
- 'coding'
tags:
- 'tips-and-tricks'
- 'typescript'
coverImage: './images/amador-loureiro-BVyNlchWqzs-unsplash-scaled.jpg'
---
Typescript is a godsend. It is very easy to get started with and for most developers there is no way back once they get the hang of it. Sometimes it can get pretty advanced and intimidating though.
This is why I decided to share 5 of my favourite typescript tips and tricks you might have needed in the past. Some are super basic, some are a bit more advanced.
**Update** _07 Oct 2019 @ 07:53_
Reddit user [jakeboone02](https://www.reddit.com/r/typescript/comments/de17xs/5_useful_typescript_tricks_small_tricks_you_might/f2t9prk?utm_source=share&utm_medium=web2x) found an error in the ternary code.
**Update** _06 Oct 2019 @ 15:06_
Reddit user [smeijer87](https://www.reddit.com/r/typescript/comments/de17xs/5_useful_typescript_tricks_small_tricks_you_might/f2qveub?utm_source=share&utm_medium=web2x) found an error in the code for null coalescing.
**Update** _06 Oct 2019 @ 14:47_
A friendly reader pointed out that these excluding interface types are called discriminated unions.
1. [react higher-order components](#hoc)
2. [smarter constructor](#constructors)
3. [type checking functions](#type-checking-function)
4. [discriminated unions](#excluding)
5. [optional chaining & null coalescing](#future)
<figure>
![](images/amador-loureiro-BVyNlchWqzs-unsplash-scaled.jpg)
<figcaption>
Photo by [Amador Loureiro](https://unsplash.com/@amadorloureiroblanco?utm_source=unsplash&utm_medium=referral&utm_content=creditCopyText) on [Unsplash](https://unsplash.com/search/photos/type?utm_source=unsplash&utm_medium=referral&utm_content=creditCopyText)
</figcaption>
</figure>
## Higher-order Components
In React, [higher order components (HOC)](https://reactjs.org/docs/higher-order-components.html) are very useful tools. Generally they are used to wrap some layout or functionality around another component. They are simply functions that return another component: basically the same pattern as decorators.
In Typescript it can be confusing how to write them while _maintaining the right props_ after wrapping the original component. Here you go:
```
import React from 'react'
function withLayout<P extends object>(WrappedComponent: React.ComponentType<P>) {
return (props: P) => (
<div id='app'>
<Header/>
<WrappedComponent {...props}/>
<Footer/>
</div>
);
}
```
Note also that when using `withLayout` you don't need to specify the generic type explicitly, as Typescript will infer it from the function parameter. Super handy!
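As a quick sketch of what that looks like in practice (the `Profile` component here is hypothetical, purely for illustration):
```
interface ProfileProps {
  name: string
}

// A plain component we want to wrap (made up for this example)
const Profile = ({ name }: ProfileProps) => <h2>{name}</h2>

// P is inferred as ProfileProps, no explicit generic needed
const ProfileWithLayout = withLayout(Profile)

// Props are still type checked after wrapping
const element = <ProfileWithLayout name="Ada" />
```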
## Smarter constructors
Let's start with the building block this is based on. It's a basic JavaScript trick, not a Typescript exclusive at first.
```
class Pizza {
slices: number
name: string
constructor(init) {
Object.assign(this, init)
}
}
const pizza = new Pizza({
slices: 8,
name: 'Margherita',
})
```
What is happening here? The super handy `Object.assign` simply copies the properties of the init object onto the class instance. This is very convenient when classes have many constructor parameters. But this is NOT type safe, as your IDE/Editor will tell you. How do we fix this?
```
import { NonFunctionKeys } from 'utility-types'
class Pizza {
slices!: number
name?: string
constructor(init: Pick<Pizza, NonFunctionKeys<Pizza>>) {
Object.assign(this, init)
}
eat() {
this.slices = 0
}
}
const pizza = new Pizza({
slices: 8,
name: 'Margherita',
})
```
Let me explain what happens:
This leverages the awesome [utility-types](https://github.com/piotrwitek/utility-types) package. We first take all the keys that are not functions, so we don't overwrite the `eat` method of the class. Then we pick those keys from the general Pizza type.
This means that `slices` will be required, while `name` will be optional, exactly as they are defined on the class.
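To make that concrete, here is a tiny sketch of what the computed constructor parameter accepts:
```
// Pick<Pizza, NonFunctionKeys<Pizza>> is roughly { slices: number; name?: string }
new Pizza({ slices: 8 }) // ok, `name` is optional
new Pizza({ slices: 8, name: 'Margherita' }) // ok
// new Pizza({ name: 'Margherita' }) // error: `slices` is required
// new Pizza({ slices: 8, eat: () => {} }) // error: methods are excluded
```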
## Type-checking Functions
Did you know you can write functions to tell typescript what type something is? This is awesome!
Suppose we have the following interfaces
```
interface Food {
name: string
}
interface Pasta extends Food {
type: 'Spaghetti' | 'Fusilli'
}
interface Pizza extends Food {
slices: number
}
```
Now we could write a `cook` function that accepts both Pasta and Pizza. Typescript itself cannot differentiate between the two.
```
function cook(what: Food) {
if(what === Pizza) ????
}
```
Fortunately there is a nice solution built into typescript.
```
function isPizza(x: Food | Pizza): x is Pizza {
return x.hasOwnProperty('slices')
}
function isPasta(x: Food | Pasta): x is Pasta {
return x.hasOwnProperty('type')
}
function cook(plate: Food) {
if (isPizza(plate)) {
// Plate is now of type Pizza
putInTheOven(plate)
}
if (isPasta(plate)) {
// Plate is now of type Pasta
putInThePan(plate)
}
}
```
Here we define two functions whose return type is `x is SomeType` and which return a boolean based on the input. It's up to you, of course, to define the check properly, but this can be very useful in various situations.
## Discriminated unions
```
type Sqlite = {
type: 'sqlite',
database: string,
}
type PostgreSQL = {
type: 'postgresql',
database: string,
host: string,
port?: number
}
type PossibleConfigs = Sqlite | PostgreSQL
function initialize(config: PossibleConfigs) {}
```
This might look like a simple one, but I often see people putting these sorts of types all into the same interface. By separating the different types of objects you make sure that they stay type safe. Also, the autocomplete will thank you.
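As a small sketch of the narrowing this buys you (`connectToPostgres` and `openSqliteFile` are hypothetical helpers, not part of the original snippet):
```
declare function connectToPostgres(host: string, port?: number): void // hypothetical
declare function openSqliteFile(database: string): void // hypothetical

function initialize(config: PossibleConfigs) {
  if (config.type === 'postgresql') {
    // Narrowed to PostgreSQL: `host` and `port` exist here
    connectToPostgres(config.host, config.port)
  } else {
    // Narrowed to Sqlite
    openSqliteFile(config.database)
  }
}
```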
## Optional Chaining & Null Coalescing
These are future features that will be introduced in Typescript 3.7. They are very useful and you will not want to live without them after the release in early November 2019.
Optional chaining is an obvious shorthand. Every time you need to check if a property (especially if nested) exists, you need to do lots of repetitive checking. No more!
```
a && a.b && a.b.c // 🤬
a?.b?.c // 🚀
```
Null coalescing is also a very useful shorthand. You all know the `||` shorthand, often used to initialise a variable if no value is given.
```
const option = something || 'default'
// Sugar for
const option = !!something ? something : 'default'
```
The problem arises with values that are actual values but evaluate as falsy.
```
false || 'default' // => 'default'
0 || 'default' // => 'default'
```
This is where the Null Coalescing comes in.
```
const option = something ?? 'default' // 🚀
// Sugar for
const option = (something === null || something === undefined)
? 'default'
: something
0 ?? 'default' // => 0
false ?? 'default' // => false
```
Basically it only assigns the default value if the provided one is `null` or `undefined`, so that values like `false` or `0` don't get overwritten.

View File

@ -0,0 +1,76 @@
---
title: "5 JetBrains tips'n'tricks I wish I'd known sooner"
date: '2019-07-03'
categories:
- 'coding'
tags:
- 'ide'
- 'jetbrains'
coverImage: './images/cards-scaled.jpg'
---
Here are some small features that may not be apparent to newer devs using the JetBrains IDEs. Most of them I discovered simply by using them.
1. Double Shift for navigating your codebase
2. cmd/ctrl + shift + f for text search
3. Remote Interpreters
4. Syncing settings
5. Reformatting
<figure>
![](images/cards-1024x567.jpg)
<figcaption>
Photo by [Matt Flores](https://unsplash.com/@matdflo?utm_source=unsplash&utm_medium=referral&utm_content=creditCopyText) on [Unsplash](https://unsplash.com/?utm_source=unsplash&utm_medium=referral&utm_content=creditCopyText)
</figcaption>
</figure>
## Double Shift
For many including myself this is the primary way to navigate code and files. Simply press shift two times, type in the file, class or function you are searching for and press enter. This is by far the quickest and most accurate way to navigate code in any Editor or IDE I've tried so far.
Do it once, and you will not go back.
## CMD + Shift + F
**Windows & Linux**: ctrl + shift + f
This is somewhat similar to 2x Shift. The main difference is that double shift searches mainly for filenames and symbols (function names, class names, etc.) while cmd + shift + f works more like a full-text search.
What makes this really powerful is that you can [regex search](https://www.jetbrains.com/help/idea/tutorial-finding-and-replacing-text-using-regular-expressions.html#Tutorial_Finding_and_Replacing_Text_Using_Regular_Expressions.xml), [mask files by extension](https://www.jetbrains.com/help/idea/finding-and-replacing-text-in-project.html#exclude_type), [exclude folders](https://www.jetbrains.com/help/webstorm/configuring-project-structure.html#022f3834) (e.g. build folders) and search only in specific directories.
If you ever _lose something in your code_ or _maybe you are new to the project_ and don't know where certain parts are located, **this is the way to find it.**
## Remote Interpreters
This is a huge one for me! JetBrains allows you to run the code on remote machines. This also extends to all the packages you install, and the shell in the terminal is automatically opened on the remote host.
Remote hosts can be either a machine you ssh into or a local docker container running a different version of the language you need.
_You might ask why?_
1. Use a docker container with a specific version of node/python/php/etc. instead of installing it locally on your machine. Basically a virtual environment for every language. Amazing!
2. Maybe you want to run the code on a Raspberry Pi, which has a different architecture. All the packages you install will be installed on the Raspberry, and when you hit _command+r_ the code will execute not on your machine but on the remote host, while you still get the logs locally. Incredible!
To configure **simply go to the** _**run**_ **menu and add a new remote interpreter**.
## Sync Settings across devices
This is very simple. You can sync all your settings, including plugins, to either your JetBrains account or your own git settings repository. When you open the IDE somewhere else, everything is back to how it was.
Enable by going to: File -> Sync IDE Settings
**Note:** The synchronisation is on a per-IDE basis, so your WebStorm settings are not synced with your PyCharm settings, of course.
## Reformatting
Yet again one of the reasons why I can't go back to VSCode. For each language there is a TON of customisation possible when reformatting. You can decide how your spaces should look, commas, imports, semicolons: everything is completely up to you.
You can tinker around with it in the Settings under: Editor -> Code Style -> <your language>
**Bonus:** If you select a folder in the project view, you can reformat all the files inside it, quick and easy. This is especially useful if you have imported some external sources, for example.
That's it. I hope you found some of it useful and that you can enjoy the JetBrains cosmos even more 😉

View File

@ -0,0 +1,266 @@
---
title: 'A guide to Directus for Gatsby or Sapper as CMS'
date: '2020-04-11'
categories:
- 'coding'
tags:
- 'cms'
- 'directus'
- 'gatsby'
- 'sapper'
- 'static-generated'
coverImage: './images/noah-silliman-doBrZnp_wqA-unsplash.jpg'
---
For those who don't know what [Directus](https://directus.io/) is: an open source, database-first CMS that generates an API. Lots of buzzwords there, but it's truly a cool project that deserves much more attention IMO.
Recently I've used it to deliver some static generated websites that needed some sort of CMS. Think of a blog or small landing pages. For that kind of site you can combine it with Gatsby or, in this case, Sapper to generate static HTML from the API.
The article will focus on Sapper, but the parts related to Directus are identical for Gatsby; just the frontend will change.
#### What will we do today?
1. [Install Directus](#1)
2. [Create some data and make it publicly available](#2)
3. [Create a super small frontend](#3)
4. [Write a custom hook for Directus that automatically triggers the build whenever content changes in the DB.](#4)
<figure>
![](images/noah-silliman-doBrZnp_wqA-unsplash.jpg)
<figcaption>
Photo by [Noah Silliman](https://unsplash.com/@noahsilliman?utm_source=unsplash&utm_medium=referral&utm_content=creditCopyText) on [Unsplash](https://unsplash.com/s/photos/rabbit?utm_source=unsplash&utm_medium=referral&utm_content=creditCopyText)
</figcaption>
</figure>
## Installing Directus
This should be straightforward. These instructions are adapted from the [official docker guide](https://docs.directus.io/installation/docker.html). I will use Docker for this.
```
# docker-compose.yml
version: "3.7"
services:
mysql:
image: mysql:5.7
volumes:
- ./data/db:/var/lib/mysql
env_file: .env
directus:
image: directus/directus:v8-apache
ports:
- "8000:80"
env_file: .env
volumes:
- ./data/config:/var/directus/config
- ./data/uploads:/var/directus/public/uploads
```
Then we run `docker-compose up -d`. After a few seconds we need to initialise Directus.
```
docker-compose run directus install --email some@email.com --password 1337
```
Now you can go to [localhost:8000](http://localhost:8000) and sign in with the credentials you just specified.
## Create some data
Now I'm going to create some data to test our blog. First go to the [settings](http://localhost:8000/admin/#/_/settings/collections) and create a new collection. I'm going to call it `posts`.
Then we are going to add a `title` text field and a `body` field with a simple markdown editor.
Lastly we add a simple post with random data.
<figure>
![](images/data.gif)
<figcaption>
Insert collection and data
</figcaption>
</figure>
## Giving permissions
Now we need to give permission to the `public` role so that we don't need an API key. For most sites this is perfectly fine, since we only expose the data that gets displayed on the website anyway.
Go to the [roles settings](http://localhost:8000/admin/#/_/settings/roles) and click on `public`. There, select the tables you want/need for the website.
Gotcha: If you have files (like photos) you also need to enable them for public viewing. Do this by clicking "Show Directus System Collections" and enabling view access to `Files`.
<figure>
![](images/permissions.gif)
<figcaption>
Give permissions to the public user
</figcaption>
</figure>
## Building a minimal frontend with sapper
I will not explain how [Sapper](https://sapper.svelte.dev/) works as this is not the focus today. If you don't know Sapper: it's very similar to Nuxt or Next.js, with the additional option to export as static HTML, so the end result is similar to a Gatsby website. Very powerful and easy to use and code.
```
# Setup
npx degit "sveltejs/sapper-template#rollup" my-blog
cd my-blog
yarn
yarn run dev
# open http://localhost:3000
```
### Load data from Directus
Directus has a [JS SDK](https://docs.directus.io/guides/js-sdk.html) and since we have made data public we don't even need a token or authentication. Awesome 🚀
```
yarn add @directus/sdk-js
```
First we are going to initialise the SDK. The default project name is simply `directus`
```
// ./src/lib/api.js
import DirectusSDK from '@directus/sdk-js'
export const client = new DirectusSDK({
url: 'http://localhost:8000',
project: 'directus'
})
```
Then let's make a server-side JSON loader so that the exported site will not even contact the server afterwards. Completely static HTML.
```
// ./src/routes/posts.json.js
import { client } from '../lib/api'
export async function get (req, res, next) {
try {
const { data } = await client.getItems('posts')
res.writeHead(200, {
'Content-Type': 'application/json'
})
res.end(JSON.stringify(data))
} catch (e) {
res.writeHead(404, {
'Content-Type': 'application/json'
})
res.end(JSON.stringify({
message: 'Not found'
}))
}
}
```
Finally, the Svelte component.
```
// ./src/routes/index.svelte
<script context="module">
export async function preload ({ params }) {
const res = await this.fetch('posts.json')
const data = await res.json()
if (res.status === 200) return { data }
else this.error(res.status, 'Not found')
}
</script>
<script>
import Post from '../components/Post.svelte'
export let data
</script>
<div class="wrapper">
{#each data as post}
<Post {post} />
{/each}
</div>
```
## Write a custom hook to trigger a build every time the data changes
When it comes to static generated sites, often the easiest way to do things is to simply regenerate the site every "x" amount of time. That kind of works, however there will be many builds that don't contain any change, and you need to wait for a cron job to see changes. That sucks.
Fortunately Directus supports writing custom hooks! 🎉
I will illustrate the case for [Drone](https://drone.io/), but the approach can be used for any CI/CD server out there.
For that we create a new php file and give it a name. In my case: `drone-hook.php`
```
# ./hooks/drone-hook.php
<?php
function process ($collection, $data) {
$collectionsToWatch = ['posts'];
if(!in_array($collection, $collectionsToWatch)) {
return;
}
$ch = curl_init();
curl_setopt($ch, CURLOPT_URL, 'https://my.domain.com/api/repos/my-username/my-repo/builds');
curl_setopt($ch, CURLOPT_POST, 1);
curl_setopt($ch, CURLOPT_HTTPHEADER, [ 'Authorization: Bearer '.$_ENV['DRONE_TOKEN'] ]);
curl_setopt($ch, CURLOPT_RETURNTRANSFER,true);
curl_exec($ch);
curl_close($ch);
}
return [
'actions' => [
'item.create' => 'process',
'item.update' => 'process',
'item.delete' => 'process',
]
];
```
I've also put the token inside the `.env` file so that I can safely check my code into a repo without having to worry about a token lying around in the codebase.
```
# .env
...
DIRECTUS_DATABASE_PASSWORD=directus
DRONE_TOKEN=my-drone-token
```
The last thing to do is actually load the code into Directus. You can simply mount the `./hooks` folder we just created into the container and reload.
```
# docker-compose.yml
version: "3.7"
...
directus:
...
volumes:
...
- ./hooks:/var/directus/public/extensions/custom/hooks
```
This will trigger a curl POST request every time an item in one of the collections listed inside `$collectionsToWatch` gets created, updated, or deleted.
You will probably need to make some adaptations if you are not using Drone, but in the end it boils down to making an HTTP request to your build server to trigger a new build.

View File

@ -0,0 +1,302 @@
---
title: 'A practical introduction to React Hooks'
date: '2019-05-03'
categories:
- 'coding'
tags:
- 'hooks'
- 'javascript'
- 'react'
coverImage: './images/matt-artz-353210-unsplash-scaled.jpg'
---
Since [React](https://reactjs.org/) 16.8 was published in February, Hooks are now officially supported and the API is finalised and stable. They arose around the idea of functional programming. In short: they allow us to have state in functional components, and with custom hooks (we'll have a look at those later) they allow us to reuse and share stateful logic between multiple components. This article assumes a basic understanding of React.
All the code shown can be found here: [https://git.nicco.io/cupcakearmy/guide-react-hooks](https://git.nicco.io/cupcakearmy/guide-react-hooks)
<figure>
![](images/matt-artz-353210-unsplash-1024x780.jpg)
<figcaption>
wrenches - new tools
</figcaption>
</figure>
#### What we will look at today
1. Class Components vs Functional Components
2. Native React hooks
- `useState`
- `useEffect`
- `useRef`
3. Custom hooks
- `useWindow`
- `useApi` (The real power)
## 1. Class vs Functional
Let's first have a look at the 'hello world' of React: a simple counter which we can increment or decrement.
###### Class
```
import React from 'react'
class SimpleClass extends React.Component {
constructor(props) {
super(props)
this.state = {
counter: 0,
}
}
componentDidMount() {
console.log('Lets goo 🚀')
setTimeout(() => this.setState({ counter: 5 }), 2000)
}
componentDidUpdate() {
console.log(this.state.counter)
}
render() {
return <div>
<div>{this.state.counter}</div>
<br/>
<button onClick={() => this.setState({ counter: this.state.counter - 1 })}>Decrease</button>
<button onClick={() => this.setState({ counter: this.state.counter + 1 })}>Increase</button>
</div>
}
}
```
Easy! Now we will convert the snippet above to the functional equivalent with the help of hooks.
###### Hooks
```
import React, { useEffect, useState } from 'react'
const SimpleFC = () => {
const [counter, setCounter] = useState(0)
return <div>
<div>{counter}</div>
<br/>
<button onClick={() => setCounter(counter - 1)}>Decrease</button>
<button onClick={() => setCounter(counter + 1)}>Increase</button>
</div>
}
```
Awesome 🚀 Simple enough right?
## 2. Native React hooks
### useState
Our constructor with `state` is gone and we have a simple `const [counter, setCounter] = useState(0)`.
How does this work? `useState` returns an array which, destructured, gives us a getter and a setter. The parameter we pass to it is the initial value. That is all. Simple and useful.
### useEffect
How about the timeout and the `console.log`? Welcome `useEffect`!
`useEffect` takes a function and executes it every time the component updates. So it is basically `componentDidMount` and `componentDidUpdate` together.
The second parameter determines when the function will be triggered. It expects an array and checks whether the variables inside it have changed.
If no array is passed, it will trigger every time the component gets updated or mounted.
This means that you can pass props into the array and the effect will only run when those change. Also, if you pass an empty array it will trigger only once and is equivalent to `componentDidMount`.
```
useEffect(myFN) // triggered every time the component gets updated
useEffect(myFN, []) // triggered only once, on mount
useEffect(myFN, [prop1, prop2]) // triggered whenever one of the listed props changes
```
In our example from above we would use it as follows:
```
import React, { useEffect, useState } from 'react'
const SimpleFC = () => {
const [counter, setCounter] = useState(0)
useEffect(() => {
setTimeout(() => {
setCounter(5)
}, 1000)
}, [])
useEffect(() => {
console.log(counter)
})
return <div>
<div>{counter}</div>
<br/>
<button onClick={() => setCounter(counter - 1)}>Decrease</button>
<button onClick={() => setCounter(counter + 1)}>Increase</button>
</div>
}
```
### useRef
Now let's have a look at `useRef`. We will have a normal class based component and the equivalent functional one with the help of hooks.
###### Class
```
class RefClass extends React.Component {
constructor(props) {
super(props)
this.myRef = React.createRef()
this.change = this.change.bind(this)
}
change() {
this.myRef.current.style.backgroundColor = '#6ba7ee'
}
render() {
return <div>
<button onClick={this.change}>Change Me</button>
<br/><br/>
<div ref={this.myRef} style={{ width: 50, height: 50, backgroundColor: '#000000' }}/>
</div>
}
}
```
###### Hooks
```
const RefFN = () => {
const rect = useRef()
const change = () => rect.current.style.backgroundColor = '#6ba7ee'
return <div>
<button onClick={change}>Change Me</button>
<br/><br/>
<div ref={rect} style={{ width: 50, height: 50, backgroundColor: '#000000' }}/>
</div>
}
```
That is a huge improvement in terms of amount of code and, most importantly, readability. `rect.current` points to the DOM element, which we can then modify at will.
As a side note: look how much cleaner the handler functions are. Instead of needing to bind the function to `this`, in functional components we just define them.
## 3. Custom hooks
This is where the real power lies. With custom hooks react allows you to reuse stateful logic and share it between components. Very powerful.
We will cover two examples:
1. Window size
2. Consume an API
### Window size
Assume you want to make a component dependent on the window size of the browser. With react hooks this is quick, easy and reusable.
###### hooks.js
```
export const useWindowSize = () => {
const getCurrentSize = () => ({ height: window.innerHeight, width: window.innerWidth })
const [size, setSize] = useState(getCurrentSize())
useEffect(() => {
const handle = () => setSize(getCurrentSize())
window.addEventListener('resize', handle)
return () => window.removeEventListener('resize', handle)
})
return size
}
```
###### component.jsx
```
import { useWindowSize } from '../Hooks'
const Custom = ()=> {
const size = useWindowSize()
return <div>
Width: {size.width}
<br/>
Height: {size.height}
</div>
}
```
As we can see, we created a custom hook called `useWindowSize`. We can now use our own hook inside of other components.
Custom hooks are just arrow functions that use the native `useState` and `useEffect` plus some custom logic you add.
Note the `return () => window.removeEventListener('resize', handle)` inside the effect function. You can return a function from the effect function that will get called when the component unmounts. This allows us to do cleanup. In this case we stop listening for window size changes. Neat 💪
### API Hook
Last but definitely not least: API calls. I personally think this is where hooks really show their power. I'll show you the code first and then explain.
###### hooks.js
```
export const useCallApi = (url) => {
const [data, setData] = useState()
const update = () => {
fetch(url)
.then(response => response.json())
.then(json => setData(json))
}
useEffect(() => {
update()
}, [])
return [data, update]
}
```
###### posts.jsx
```
import { useCallApi } from '../Hooks'
const Posts = () => {
const [posts] = useCallApi(`https://jsonplaceholder.typicode.com/posts`)
const [users] = useCallApi(`https://jsonplaceholder.typicode.com/users`)
// ...
if (!posts) return <div>Loading 🕰</div>
return <div>
{posts.map((post, i) => <div key={i}>
<h3>{post.title}</h3>
<p>{post.body}</p>
</div>)}
</div>
}
```
What is happening? We created a custom hook that queries an API and returns the result. How? We pass a url to the hook and we get the data back.
Internally the hook uses `useState` to save the results. It executes the update function once (because the `useEffect` has an empty array as its second parameter).
Now we can use the `useCallApi` hook in multiple components or many times inside the same component. The options are endless.
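For instance, a small sketch (not from the original repo) that reuses the hook for the users endpoint and calls the returned `update` function manually:
```
import { useCallApi } from '../Hooks'

const Users = () => {
  const [users, refresh] = useCallApi(`https://jsonplaceholder.typicode.com/users`)

  if (!users) return <div>Loading 🕰</div>
  // The second array element re-fetches the data on demand
  return <div>
    <button onClick={refresh}>Refresh</button>
    {users.map((user, i) => <div key={i}>{user.name}</div>)}
  </div>
}
```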

View File

@ -0,0 +1,199 @@
---
title: 'A sane and efficient guide for consuming GraphQL endpoints in Typescript'
date: '2021-12-31'
categories:
- 'coding'
tags:
- 'code-generation'
- 'graphql'
- 'typescript'
coverImage: './images/clayton-robbins-Ru09fQONJWo-unsplash-scaled.jpg'
---
GraphQL is becoming common practice in the wild, while I feel the workflow with Typescript is still not straightforward. I want to propose one way to go about it and hopefully make your next Typescript GraphQL project a joy to work with!
Let's dive deeper 🤿.
I created a tiny [companion repository](https://github.com/cupcakearmy/blog-typescript-graphql) if you want to check out the code and try it out.
Or check out the [finished demo](https://blog-typescript-graphql.vercel.app/).
<figure>
![](images/clayton-robbins-Ru09fQONJWo-unsplash-1024x683.jpg)
<figcaption>
Photo by [Clayton Robbins](https://unsplash.com/@claytonrobbins?utm_source=unsplash&utm_medium=referral&utm_content=creditCopyText) on [Unsplash](https://unsplash.com/@claytonrobbins?utm_source=unsplash&utm_medium=referral&utm_content=creditCopyText)
</figcaption>
</figure>
## Intro
First we need to decide what we want (and probably need):
- Editor support for syntax highlighting `gql` and `.graphql` files.
- Strict type safety for our client.
- Easy tooling & workflow
So our workflow will look something like this:
```
GraphQL API -> Schema -> Queries & Mutations -> Typescript -> Client
```
For this article we'll build a minuscule one pager using the [SpaceX Land GraphQL API](https://api.spacex.land/graphql/) to display some space travel data.
## Editor setup
The setup will be for VSCode. For that we first install the [GraphQL extension](https://marketplace.visualstudio.com/items?itemName=GraphQL.vscode-graphql). This will enable us to have warnings and autocompletion inside of `gql` tags and `.graphql` files.
We need to add a `.graphqlrc.yml` file at the root with the following content:
```
schema: https://api.spacex.land/graphql/
```
## Writing Queries & Mutations
Now onto the real stuff.
We want to take our endpoint, generate types and queries from it that can then be used by Typescript safely. To do that we will:
1. Set up generators for Schema, Queries, Mutations & SDK.
2. Write some Queries & Mutations
3. Generate the SDK
4. Consume the SDK
### Setup
There is this amazing project called `@graphql-codegen`, which is a collection of tools that help you generate various things from GraphQL. Let's install:
```
# Generators
pnpm i -D @graphql-codegen/cli @graphql-codegen/typescript @graphql-codegen/typescript-operations @graphql-codegen/typescript-graphql-request
# For the SDK
pnpm i -D graphql graphql-request graphql-tag
```
I will assume my GraphQL stuff will live under `./src/lib/gql`
We will create a top-level configuration file called `codegen.yaml` to handle all of our generation steps. Ignore the `config` option for now; I will explain it later.
```
schema: https://api.spacex.land/graphql/
documents: "src/**/*.graphql"
generates:
./src/lib/gql/gen.ts:
plugins:
- "@graphql-codegen/typescript"
- "@graphql-codegen/typescript-operations"
- "@graphql-codegen/typescript-graphql-request"
config:
maybeValue: "T"
typesPrefix: GQL
immutableTypes: true
useTypeImports: true
avoidOptionals: true
```
The property `schema` does not need an explanation.
`generates` has 3 plugins enabled: one for the general types, another for queries and mutations, and the last one to generate a ready-to-use SDK, saving the output under `./src/lib/gql/gen.ts`.
`documents` is a glob that will find all the GraphQL files we write and generate the corresponding code.
### Creating Queries
Now let's create a `src/lib/gql/root.graphql` file and write some queries, all autocompleted of course!
```
query LaunchpadsMany {
launchpads(limit: 10) {
id
name
location {
name
}
successful_launches
status
}
}
query LaunchByYear($year: String!) {
launches(find: { launch_year: $year }) {
mission_id
mission_name
launch_date_utc
rocket {
rocket_name
}
}
}
```
### Let magic do its thing
```
pnpm exec graphql-codegen
```
This will look at all our custom queries and mutations and generate us a ready to consume SDK that is completely typed. Amazing!
### Leverage the new SDK
```
// src/lib/gql/index.ts
import { GraphQLClient } from 'graphql-request'
import { getSdk } from './gen'
const client = new GraphQLClient('https://api.spacex.land/graphql/')
export const SDK = getSdk(client)
```
```
import { SDK } from '$lib/gql'
const data = await SDK.LaunchByYear({ year: '2021' })
```
You can also use the generated types to explicitly set them
```
import { SDK } from '$lib/gql'
import type { GQLLaunchByYearQuery } from '$lib/gql/gen'
const data: GQLLaunchByYearQuery = await SDK.LaunchByYear({ year: '2021' })
```
Everything is typed now: I can't pass a number to the `year` variable or use returned data that does not exist. Typescript will error on me. This not only gives us autocompletion but also safety about what we are doing.
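As a tiny sketch of what that protection looks like in practice (assuming the generated `LaunchByYear` operation from above):
```
import { SDK } from '$lib/gql'

// @ts-expect-error `year` is typed as a string, so passing a number does not compile
await SDK.LaunchByYear({ year: 2021 })

const data = await SDK.LaunchByYear({ year: '2021' })
data.launches[0].rocket.rocket_name // ✅ part of the query, fully typed
// data.launches[0].crew            // ❌ compile error: not selected in the query
```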
### Configuration options
I promised I would come back to it at some point.
```
schema: ...
generates:
...
config:
maybeValue: "T"
typesPrefix: GQL
immutableTypes: true
useTypeImports: true
avoidOptionals: true
```
There are [many options](https://www.graphql-code-generator.com/plugins/typescript#config-api-reference) for the generators, but I think these are quite sensible defaults.
`maybeValue` is `T | null` by default, but since we only use our queries, which are type safe, we can remove the uncertainty and use the correct type straight away.
`avoidOptionals` same thing as `maybeValue`, just with `prop?:`. We don't want that.
`typesPrefix` is useful if you have your own type definitions that you don't want to clash with. I like to prefix all my generated GraphQL stuff with `GQL` to keep it tidy.
`immutableTypes` I prefer using immutable types, which basically adds a `readonly` to every property. This way we are sure we are not editing data on the client.
`useTypeImports` this uses `import type` whenever possible.
## Final thoughts
I hope this made your GraphQL life a bit easier; it definitely did for me, and it's way more fun to consume GraphQL APIs this way. Also worth mentioning: you can use the `@graphql-codegen/typescript-generic-sdk` package instead of `@graphql-codegen/typescript-graphql-request` if you want to do the network requests yourself. It's easy to use, but if you don't really have a reason, I'd say just stick with the `graphql-request` one.

View File

@ -0,0 +1,137 @@
---
title: 'Automate Github releases with Drone.'
date: '2020-01-29'
categories:
- 'coding'
tags:
- 'cd'
- 'drone'
coverImage: './images/franck-v-U3sOwViXhkY-unsplash-scaled-1.jpg'
---
If you have a project on GitHub that publishes releases with code or binaries, for example, it might be a good idea to automate them. Not only does this save a lot of clicks and time, it also makes releases predictable and therefore less prone to errors in the process.
For this article I will take my own [project](https://github.com/cupcakearmy/autorestic) as the example here, but of course this can be applied to any project, written in whatever language and/or framework.
Also, I will base this guide on [Drone](https://drone.io/), but I'm sure the same workflow exists for Jenkins/Circle/whatever CI/CD system you are using.
This means I'm assuming you have a repository already running with Drone.
<figure>
![](images/franck-v-U3sOwViXhkY-unsplash-scaled-1.jpg)
<figcaption>
Photo by [Franck V.](https://unsplash.com/@franckinjapan?utm_source=unsplash&utm_medium=referral&utm_content=creditCopyText) on [Unsplash](https://unsplash.com/s/photos/robot?utm_source=unsplash&utm_medium=referral&utm_content=creditCopyText)
</figcaption>
</figure>
The first thing we will need is an access token for the Github API.
You can get one here: [https://github.com/settings/tokens](https://github.com/settings/tokens). I called mine `Drone` and you need to check the permissions for the repos as follows.
<figure>
![](images/Screenshot-2020-01-29-at-14.57.05.png)
<figcaption>
How to create a new token in Github
</figcaption>
</figure>
Copy the token and save it somewhere **safe**. You will see it only once.
We will add this token to our Drone repository settings. For that navigate to your drone instance and open the settings for the repository in question.
<figure>
![](images/Screenshot-2020-01-29-at-14.55.28.png)
<figcaption>
Add the token to Drone secrets
</figcaption>
</figure>
I've called my secret `github` and I have not allowed it in PRs. Otherwise a PR made by some random user could trigger a release. We don't want that.
Now it's time to edit our drone file and make everything automatic. The flow at the end will be as follows.
1. Code, commit and develop
2. When you are ready for the next release we create a tag
3. Once a tag is created and pushed drone will automatically build and release that code attached to the tag.
Simple, right? Let's see how!
```
# .drone.yml
---
kind: pipeline
name: default
steps:
- name: build
image: node
pull: always
commands:
- yarn
- yarn run bin
when:
event: tag
- name: publish
image: plugins/github-release
pull: always
settings:
api_key:
from_secret: github
files: bin/*
checksum:
- sha512
note: CHANGELOG.md
when:
event: tag
---
kind: signature
hmac: 3b1f235f6a6f0ee1aa3f572d0833c4f0eec931dbe0378f31b9efa336a7462912
...
```
Let's understand what is happening here:
First I'm building my project. In this case it is a standalone TypeScript executable built by [pkg](https://github.com/zeit/pkg). The built binaries will be emitted into the `./bin` folder. But it really does not matter, it could be anything.
Secondly we tell the [Github release plugin](http://plugins.drone.io/drone-plugins/drone-github-release/) which files we want to include in the release. In my case this was everything inside the `bin` folder. This can also be an array.
```
files:
- dist/*
- bin/binary.exe
```
The `api_key` holds the token, which we load from a secret so that we don't simply put it in the `.drone.yml` file, which would be a huge security issue!
The `checksum` setting is also great because, as the name suggests, the plugin automatically generates checksums for all the files. That is amazingly practical and there is no reason not to do it. You can choose from a few hash functions, but I would suggest simply going with `sha512`.
## So how do I trigger a release now?
Simple! First tag your code with the following command
```
git tag 1.2.3
```
Now push the tag and drone will be on its way
```
git push --tags
```
That's it! Hope it made your release journey easier 🙂

View File

@ -0,0 +1,67 @@
---
title: 'Backup MongoDB inside of Docker the easy way'
date: '2019-08-15'
categories:
- 'coding'
tags:
- 'cli'
- 'docker'
coverImage: './images/tobias-fischer-PkbZahEG2Ng-unsplash-scaled.jpg'
---
Backing up a mongo instance is more confusing than it should be. Maybe you have run into a `the input device is not a TTY` error, or you simply don't know how to do it? Here are two one-liners to back up and restore a running mongo instance.
<figure>
![](images/tobias-fischer-PkbZahEG2Ng-unsplash-1024x497.jpg)
<figcaption>
Photo by [Tobias Fischer](https://unsplash.com/@tofi?utm_source=unsplash&utm_medium=referral&utm_content=creditCopyText) on [Unsplash](https://unsplash.com/search/photos/database?utm_source=unsplash&utm_medium=referral&utm_content=creditCopyText)
</figcaption>
</figure>
## Setup
First we define our mongo instance like below. Notice that instead of mapping the data directory onto our filesystem, we use a named volume.
###### docker-compose.yml
```
version: '3.7'
volumes:
db:
services:
db:
image: mongo:3-xenial
restart: always
volumes:
- db:/data/db
ports:
- 27017:27017
```
Then start with `docker-compose up -d`.
## Backup
First we will do a backup of our running instance.
```
docker-compose exec -T db mongodump --archive --gzip --db mydb > dump.gz
```
The `-T` option disables pseudo-TTY allocation, which lets us pipe the output to our own machine. We also tell mongo to use the `--gzip` option to compress the file significantly.
Lastly we specify the `--db <database>` that we want to backup.
## Restore
Whenever we want to restore a db, or maybe seed it, we can run the following:
```
docker-compose exec -T db mongorestore --archive --gzip < dump.gz
```

View File

@ -0,0 +1,98 @@
---
title: "Be your own (tiny) image CDN"
date: "2023-04-28"
---
Today, I want to share how to create and host your own image transformation service, much like the known [Imgix](https://imgix.com/) and [Cloudinary](https://cloudinary.com/). The aim is to have a powerful transformation server for images that caches, so images only need to be computed once.
The building blocks will be [imgproxy](https://github.com/imgproxy/imgproxy) and [nginx](https://nginx.org/). The former is a battle tested and fast image server with support for most image operations, while nginx should not need an introduction.
<figure>
![](images/meagan-carsience-QGnm_F_nd1E-unsplash1-1024x683.jpg)
<figcaption>
Photo by [Meagan Carsience](https://unsplash.com/@mcarsience_photography?utm_source=unsplash&utm_medium=referral&utm_content=creditCopyText) on [Unsplash](https://unsplash.com/photos/QGnm_F_nd1E?utm_source=unsplash&utm_medium=referral&utm_content=creditCopyText)
</figcaption>
</figure>
While imgproxy is the core of this operation, it does not support caching. This is intentional, as it's meant to be run behind a proxy. For that, nginx is the tool of choice, as it enables us to easily set up caching rules and avoid generating the same image twice within a given cache interval. Everything will be done in docker containers, but the concept, of course, extends to bare metal too.
## Setup
Imgproxy is fortunately very customisable, and options can be passed as env variables, which is wonderful.
It's generally advised to use signed URLs if possible. In my case, there was no backend that could sign them, so it was skipped. Whenever omitting signing, it is critical to limit the allowed sources to the minimum with `IMGPROXY_ALLOWED_SOURCES` so that the service cannot be abused by other websites.
Below is the docker-compose file used. Only `IMGPROXY_BIND` is strictly required, as otherwise nginx cannot connect to our image container. The other options are up to you and are just here for a quick setup.
```
# docker-compose.yaml
version: '3.8'
volumes:
cache:
services:
img:
image: darthsim/imgproxy
environment:
# Required for nginx
IMGPROXY_BIND: 0.0.0.0:80
# Security
IMGPROXY_MAX_SRC_RESOLUTION: 100
IMGPROXY_ALLOWED_SOURCES: https://images.example.org/
# Transforms
IMGPROXY_ENFORCE_WEBP: true
IMGPROXY_ENFORCE_AVIF: true
IMGPROXY_ONLY_PRESETS: true
IMGPROXY_PRESETS: default=resizing_type:fit,sm=size:250:250,md=size:500:500,lg=size:1000:1000
proxy:
image: nginx
ports:
- 80:80
volumes:
- ./proxy.conf:/etc/nginx/conf.d/default.conf:ro
- cache:/tmp
```
The more interesting part is the nginx configuration file below. In this case, we target a cache TTL of 30 days. This could easily be increased if we are only talking about static images.
```
# Set cache to 30 days, 1GB.
# Only use the uri as the cache key, as it's the only input for imageproxy.
proxy_cache_path /tmp levels=1:2 keys_zone=images:8m max_size=1g inactive=30d;
proxy_cache_key "$uri";
proxy_cache_valid 200 30d;
server
{
listen 80;
server_name _;
location /
{
proxy_pass_request_headers off;
proxy_set_header HOST $host;
proxy_set_header Accept $http_accept;
proxy_pass http://img;
proxy_cache images;
}
}
```
Here we are configuring a few things, so let's elaborate:
First a cache is configured at the location `/tmp`, with the name `images`, a maximum size of 1 gigabyte, and the `inactive` parameter set to 30 days.
For the cache key, we use only the `$uri` variable, as all the parameters that affect image generation are included in the path, which makes the cache key unique per transformation.
Lastly, we tell nginx to cache all responses with code `200` for 30 days.
Another important trick is to strip all headers that reach the proxy. This is done by setting `proxy_pass_request_headers off` and only passing along the `Accept` header, as it's required for automatically determining the image format.
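Just to illustrate the point: the only client header the image service ever sees is `Accept`, which is what lets it decide between WebP/AVIF and the original format. A quick sketch (the URL is a placeholder, not a real imgproxy path):
```
// Sketch: the client advertises which formats it accepts.
// nginx strips everything else before the request reaches imgproxy.
const res = await fetch('https://img.example.org/<imgproxy-path>', {
  headers: { Accept: 'image/avif,image/webp,image/*' },
})
console.log(res.headers.get('content-type')) // e.g. image/avif if supported
```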

View File

@ -0,0 +1,79 @@
---
title: 'Cleanup downloaded Google Photos Takeout archives'
date: '2019-05-04'
categories:
- 'general'
tags:
- 'google-photos'
- 'google-takeout'
- 'icloud-photos'
- 'migration'
coverImage: './images/rayan-almuslem-1302778-unsplash-scaled.jpg'
---
Recently I've been taking my tin foil hat a bit more seriously, and since I mostly live in the Apple ecosystem (yes, you can judge me), iCloud Photos felt like a pretty good alternative. Yes, it's still a cloud, but the content [is encrypted](https://support.apple.com/en-us/HT202303) and, most importantly, Apple has no real economic incentive to data mine your data. They are far ahead in terms of privacy. With that out of the way, let's go! 🚀
TLDR: I wrote this [cleaning script](https://gist.github.com/CupCakeArmy/51070b311e6fd0a3f2d793bee3350ede) (tested only on macOS) to remove all duplicates from the [Google Takeout](https://takeout.google.com/) folders.
<figure>
![](images/rayan-almuslem-1302778-unsplash-1024x683.jpg)
<figcaption>
Photo by [Rayan Almuslem](https://unsplash.com/photos/_aPYcEKtDQ0?utm_source=unsplash&utm_medium=referral&utm_content=creditCopyText) on [Unsplash](https://unsplash.com/search/photos/camera?utm_source=unsplash&utm_medium=referral&utm_content=creditCopyText)
</figcaption>
</figure>
The process seemed easy at first. Google offers an awesome tool for exporting data out of their servers. It's called [Takeout](https://takeout.google.com/). So basically you select the Google Photos service and let them create the archives. Then, after a few hours, you can download them.
Now at the time I had ~40 GB worth of pictures and videos saved in Google's cloud, however the archives I downloaded were about ~90 GB. I started looking into it, and a lot of photos were duplicates and edited versions that Google was keeping. In addition, the folders were full of JSON metadata.
**time for cleanup 🧹**
Fortunately for us there is the awesome `find` command that will save our lives. Removing metadata and duplicates from more than ~50k files is impossible by hand.
First we need to remove all `.json` files:
```
find ./my_takeout_folder -name "*.json" -type f -delete
```
Then all the duplicates that contain a `(1)` at the end of the file name.
```
# macOS
find -E ./my_takeout_folder -regex ".*\([0-9]+\).*" -type f -delete
# Unix (Thanks to Stravos F. for pointing that out ❤️)
find ./my_takeout_folder -regextype posix-extended -regex ".*\([0-9]+\).*" -type f -delete
```
All the photos edited by Google:
```
find ./my_takeout_folder -name "*edited*" -type f -delete
```
And lastly remove all the empty folders.
```
find ./my_takeout_folder -type d -empty -delete
```
You will probably have multiple folders because you will have to download multiple archives. Simply unpack them all into one folder and run the scripts on that folder.
If you are too lazy to run them manually, just get this script I wrote
<script src="https://gist.github.com/CupCakeArmy/51070b311e6fd0a3f2d793bee3350ede.js"></script>
Then...
```
chmod +x ./clean
./clean my_folder_with_all_the_google_takeouts
```
Finally just drag and drop into the Photos app.

View File

@ -0,0 +1,82 @@
---
title: "Create a QR code for Google Drive"
date: "2022-03-17"
---
So you want to make a QR code for a Google Drive file? It's actually quite easy, I'll show you!
## 1\. Upload the file and get the shared link
As shown in the video below, the first thing to do is upload your file (in this case a PDF) and create a shareable link.
<figure>
<figcaption>
Uploading and generating a link for a google drive file
</figcaption>
</figure>
## 2\. Convert the link to a download link
```
https://drive.google.com/file/d/1LZ09_aJnGy1aHY0DEuOEFGU4mon2ijir/view?usp=sharing
```
If we simply use the provided link (example above) it won't download the file, but open a preview of it.
If we want a direct download we need to change it to the one below:
```
https://drive.google.com/uc?export=download&id=1LZ09_aJnGy1aHY0DEuOEFGU4mon2ijir
```
To summarise:
```
https://drive.google.com/file/d/<id>/view?usp=sharing
⬇️
https://drive.google.com/uc?export=download&id=<id>
```
Note that the _`<id>`_ part will be different for your file. The rest is the same.
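If you need to do this for many files, the rewrite is easy to script. Here's a tiny helper (just a sketch following the pattern above):
```
// Turns a Drive share link into a direct-download link by extracting the <id>.
function toDownloadLink(shareLink: string): string {
  const match = shareLink.match(/\/file\/d\/([^/]+)\//)
  if (!match) throw new Error('Not a recognised Google Drive share link')
  return `https://drive.google.com/uc?export=download&id=${match[1]}`
}

// toDownloadLink('https://drive.google.com/file/d/<id>/view?usp=sharing')
// => 'https://drive.google.com/uc?export=download&id=<id>'
```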
## 3\. Create the QR Code
To create a QR code there is a very good free website called: [the-qrcode-generator.com](https://www.the-qrcode-generator.com/). Here you simply paste the link and get your QR Code.
<figure>
![](https://api.nicco.io/wp-content/uploads/2022/03/QR-Big.svg)
<figcaption>
Big QR code
</figcaption>
</figure>
## 4\. Make the QR code smaller and track clicks
If you want to have a smaller and cleaner QR code you can use a URL shortener like [Cuttly](https://cutt.ly/) to do so. With Cuttly the URL gets shorter and you can see how many people clicked on it. The new link and QR code then look something like this:
```
https://cutt.ly/CSonJs9
```
<figure>
![](https://api.nicco.io/wp-content/uploads/2022/03/QR-Small.svg)
<figcaption>
Small QR code
</figcaption>
</figure>

View File

@ -0,0 +1,96 @@
---
title: 'Going beyond NPM: meet Yarn & pnpm'
date: '2019-08-27'
categories:
- 'coding'
tags:
- 'javascript'
- 'node'
- 'npm'
- 'pnpm'
- 'yarn'
coverImage: './images/ruchindra-gunasekara-GK8x_XCcDZg-unsplash-scaled.jpg'
---
If you are a JS developer you probably use NPM multiple times a day without thinking about it. It's the default package manager which ships with node.
But have you ever wondered if there was another way of managing your (probably too many 😉) packages? We will look at [yarn](https://yarnpkg.com/en/) and [pnpm](https://pnpm.js.org/) as worthy rivals.
**Update** _27 Aug 2019 @ 21:23_
As [this user](https://www.reddit.com/r/javascript/comments/cw64xt/going_beyond_npm_meet_yarn_pnpm/ey92a0i?utm_source=share&utm_medium=web2x) on reddit pointed out, npm now supports offline installs too, so that part is the same for all three package managers. Apparently the same goes for checksums, but I could not verify it.
**Update** _27 Aug 2019 @ 22:51_
If you are having trouble with pnpm, try using `pnpm i --shamefully-flatten`. Thanks to [this reddit user](https://www.reddit.com/r/node/comments/cw64qq/going_beyond_npm_meet_yarn_pnpm/ey9aa1v?utm_source=share&utm_medium=web2x).
For the lazy readers: [Jump to the conclusion here](#conclusion).
<figure>
![](images/ruchindra-gunasekara-GK8x_XCcDZg-unsplash.jpg)
<figcaption>
Photo by [Ruchindra Gunasekara](https://unsplash.com/@ruchindra?utm_source=unsplash&utm_medium=referral&utm_content=creditCopyText) on [Unsplash](https://unsplash.com/?utm_source=unsplash&utm_medium=referral&utm_content=creditCopyText)
</figcaption>
</figure>
You might wonder now: why? _Why should I bother reading this when NPM works perfectly_? Is this just another rush to the latest framework? Don't worry: there are actual reasons you might want to switch.
#### Speed!!... or the lack of it?
The biggest issue that plagues npm is speed. Unfortunately, even with the latest version (6), npm is slow. If you ever had to delete the node_modules folder to do a clean install on a bigger project, you know what I mean. Fans start to spin, the laptop gets warm and you can go read an article while npm chews on the dependencies.
## Yarn to the rescue
Yarn came along in the early days and you have definitely heard about it wandering across GitHub. Back in the day (before npm 5 with the `package-lock.json`) Yarn addressed the consistency issues by being the first to generate a lockfile (`yarn.lock`). This file could be checked in so that devs would have consistent dependencies across multiple machines.
#### Speed
Yarn is often twice as fast as npm. It's truly impressive and you need to see it for yourself to believe it. The CLI output is also way more human-friendly.
#### Offline
Every package version is only downloaded once, so if you happen to lose connection or need to download the same package again you will get a substantial speed boost since packages are cached locally.
_See update at the top_.
#### yarn upgrade-interactive
This is incredible 😍. If you run `yarn upgrade-interactive` you get an interactive CLI where you can choose which packages to upgrade and which not. It's a simple thing, but one you cannot live without anymore once you have tried it.
#### yarn why
Similar to the previous command, this is a very handy CLI goodie. Simply run `yarn why some-package` and yarn will tell you why it was installed, which dependency it came from, etc.
#### Lack of npx
Unfortunately Yarn lacks an `npx` equivalent, which is the only drawback I encountered while using yarn. Other than that, yarn is very fast and a solid alternative to npm.
## PNPM: The underdog
I truly love this project so I might be biased. They basically implemented a thought I had a while back: **reuse the same packages across your computer**. Confused? Let me explain:
Have you ever measured the size of your node_modules?
```
du -sh node_modules
# --> 816M node_modules
```
What?! 0.8 GB for a react-native project?!
Unfortunately that is a pretty common reality and **pnpm** aims to solve that.
pnpm links your packages with symlinks. This means that **the same version of a package only exists once** on your computer. If you install the same package twice, it will simply be symlinked into your node_modules. 🚀
[On top of that it's even faster than yarn.](https://github.com/pnpm/benchmarks-of-javascript-package-managers)
#### So perfection is achieved? Let's all switch to pnpm?
Unfortunately it's not that easy. If you start a new project you can probably go with pnpm, but with existing projects I had some problems building my apps. So it's experimental at best and should not be used without rigorous testing, as it might break your app. pnpm also supports npx via `pnpx`.
## Conclusion Time
<table><tbody><tr><td></td><td><strong>Speed</strong></td><td><strong>NPX</strong></td><td><strong>Offline</strong></td><td><strong>Well supported</strong></td></tr><tr><td>npm</td><td>🐌</td><td>✅</td><td>✅</td><td>✅</td></tr><tr><td>yarn</td><td>🚄</td><td>❌</td><td>✅</td><td>✅</td></tr><tr><td>pnpm</td><td>🚀</td><td>✅ (pnpx)</td><td>✅</td><td>❌</td></tr></tbody></table>
As you can see above there is no clear winner. npm is of course the most compatible, but really falls behind in terms of speed. **Yarn in my opinion is currently your best bet**; fall back to `npx your-command` when npx is needed.
pnpm is an incredibly cool tool but is not ready for production yet. With react-native it can cause problems, but with the "normal" stacks it works very well. I will use pnpm for my personal projects from now on.

View File

@ -0,0 +1,171 @@
---
title: 'How to avoid killing your MacBook / Laptop battery'
date: '2019-07-23'
categories:
- 'general'
tags:
- 'battery'
- 'laptop'
- 'macbook'
coverImage: './images/israel-palacio-ImcUkZ72oUs-unsplash-scaled.jpg'
---
As of May 2020 this is no longer relevant! macOS 10.15.5 finally addressed this issue by not charging the battery to 100%, depending on battery temperature, health and so on 🚀🚀🚀
There is a lot of misleading wisdom out there about batteries (e.g. that it is ok to leave your laptop plugged in). The reasons behind it are pretty interesting and not at all trivial. If you want to know why: keep reading.
**TLDR;** [Jump to the solution](#solution)
<figure>
![](images/israel-palacio-ImcUkZ72oUs-unsplash-scaled.jpg)
<figcaption>
Photo by [israel palacio](https://unsplash.com/@othentikisra?utm_source=unsplash&utm_medium=referral&utm_content=creditCopyText) on [Unsplash](https://unsplash.com/search/photos/electricity?utm_source=unsplash&utm_medium=referral&utm_content=creditCopyText)
</figcaption>
</figure>
> The worst situation is keeping a fully charged battery at elevated temperatures.
>
> batteryuniversity.com
Batteries are consumable items. This means they degrade over time and lose their ability to store energy. We will see how and why this happens and how to combat it.
## Why do batteries degrade?
In theoretical chemistry your battery should be able to last forever. But in the real world of course that's not possible. There are 2 main killers for batteries:
1. Heat
2. Cycles (Especially above ~80%)
### Killer #1: Heat
With heat, the internal materials of the battery start to lose their chemical form and therefore their capacity. Below is a table that illustrates how batteries react to temperature exposure.
_Estimated recoverable capacity when storing Li-ion for one year at various temperatures._
<table><tbody><tr><td><strong>Temperature</strong></td><td><strong>40% charge</strong></td><td><strong>100% charge</strong></td></tr><tr><td>0°C</td><td>98%&nbsp;(after 1 year)</td><td>94%&nbsp;(after 1 year)</td></tr><tr><td>25°C</td><td>96%&nbsp;(after 1 year)</td><td>80%&nbsp;(after 1 year)</td></tr><tr><td>40°C</td><td>85%&nbsp;(after 1 year)</td><td>65%&nbsp;(after 1 year)</td></tr><tr><td>60°C</td><td>75%&nbsp;(after 1 year)</td><td>60%<br>(after 3 months)</td></tr></tbody></table>
As we can see, a heated battery basically is a lost cause, especially when fully charged.
Unfortunately **there is little we can do for the heat issue** since the thermal design of our Laptop is fixed and the manufacturer will choose where to place the battery and if and how to cool it.
### Killer #2: Cycles
Over this one we have much more control. A cycle varies in definition, but basically it is a discharge followed by a charge.
Why are cycles bad? Well, whenever we move electrons around our battery, either by using it or while charging, the chemical material is subjected to wear. Why exactly this happens is beyond my understanding of chemistry, so I won't try to explain it since I would probably do it wrong.
What I can tell you is how to charge and use your battery in the correct manner:
1. Ideally leave your battery between 30% and 80%
2. Don't charge over 80% if not strictly required for a long trip or so.
### Don't believe me, trust the data
<figure>
![](images/DST-cycles-web2.jpg)
<figcaption>
Capacity loss as a function of charge and discharge bandwidth. [Source](https://batteryuniversity.com/index.php/learn/article/how_to_prolong_lithium_based_batteries)
</figcaption>
</figure>
As we can observe above, all the tested bandwidths that regularly charged to a full 100% degraded the fastest. Don't do that.
Part of the problem, as you can see in the graphic below, is that while **the first 80% of the capacity is charged easily and quickly, the last 80% to 100% follow an exponential curve**. Those last percentages are really taxing on the battery because you are trying to stuff the last electrons into an almost full battery. Imagine stuffing one last bag into an almost full garbage bin: you will need to push it.
Making matters worse, the battery will heat up during the last steps of charging because of the strain it is undergoing. This only adds to the problem, since as we learned above, heat is incredibly bad for capacity.
<figure>
![](images/Battery-Charge-Voltage-vs-Time.png)
<figcaption>
Charging graph for Lithium Batteries.
Graph by [batteryuniversity.com](https://batteryuniversity.com/index.php/learn/article/charging_lithium_ion_batteries) - Colorised by [Android Authority](https://www.androidauthority.com/maximize-battery-life-882395/)
</figcaption>
</figure>
This is also the reason why electric vehicles only charge up to 80%. It increases the lifespan of the battery significantly. On the other hand, consumer **products like laptops and phones are more about selling you maximum battery life.** What they don't tell you is **how quickly that advertised battery life will degrade after 6 months** of usage.
### Myth: It's ok to keep your laptop plugged in
This is a misconception that arose in recent years. While it is not completely wrong, it overlooks some important aspects.
What is true? Modern laptops and phones don't overcharge the battery and will switch to using only the power cable as the source. **However**, at some point the battery will dip below 97% and the laptop will start charging it again to 100%. Assuming you use your laptop for work the whole day, this will happen multiple times daily. **It will break it**.
## Solution
### For MacBooks (magsafe):
Put a piece of paper/cloth on the middle connector of your MacBook MagSafe charger **whenever your laptop is plugged in for long periods**.
<figure>
![](images/howto.jpg)
<figcaption>
How-To protect the battery with Magsafe chargers
</figcaption>
</figure>
**Update:** I tried new methods, the one that seems the most practical is to use a little piece of tape that you can bend in front of the connector when needed.
##### Updated method with tape
Put a piece of tape on your middle connector. For simplicity you can just cover 3 pins and leave the 2 outside pins (does not matter which side) free.
Now you can easily switch between charging the battery or just working on power, **whenever your laptop is plugged in for long periods**.
<figure>
![](images/howto-1.jpg)
<figcaption>
Same method, just with tape. Much easier to use.
</figcaption>
</figure>
<figure>
![](images/status.jpg)
<figcaption>
Status of battery after modification
</figcaption>
</figure>
This will prevent your laptop from using **and** charging your battery while using it for a whole day.
**Credits for the hack**: [https://superuser.com/a/1130375](https://superuser.com/a/1130375)
### Laptops with removable batteries:
Simply remove the battery when using the laptop for long periods. This will prevent the heat of the laptop from being transferred to the battery, and it won't charge over and over again to 100%.
#### Sources
- [https://batteryuniversity.com/index.php/learn/article/charging_lithium_ion_batteries](https://batteryuniversity.com/index.php/learn/article/charging_lithium_ion_batteries)
- [https://batteryuniversity.com/learn/article/bu_808b_what_causes_li_ion_to_die](https://batteryuniversity.com/learn/article/bu_808b_what_causes_li_ion_to_die)
- [https://batteryuniversity.com/index.php/learn/article/how_to_prolong_lithium_based_batteries](https://batteryuniversity.com/index.php/learn/article/how_to_prolong_lithium_based_batteries)
- [https://batteryuniversity.com/index.php/learn/article/do_and_dont_battery_table](https://batteryuniversity.com/index.php/learn/article/do_and_dont_battery_table)
- [https://www.electricbike.com/how-to-make-lithium-battery-last/](https://www.electricbike.com/how-to-make-lithium-battery-last/)
- [https://www.androidauthority.com/maximize-battery-life-882395/](https://www.androidauthority.com/maximize-battery-life-882395/)

View File

@ -0,0 +1,340 @@
---
title: 'How to bring your neural network to the web'
date: '2020-02-10'
categories:
- 'coding'
tags:
- 'ai'
- 'keras'
- 'machine-learning'
- 'tensorflow'
coverImage: './images/natasha-connell-byp5TTxUbL0-unsplash-scaled-1.jpg'
---
Artificial intelligence, neural networks, machine learning. I don't know which of them is the bigger buzzword. If we look past the hype there are some actually very interesting use cases for machine learning in the browser.
**For the lazy that simply want to jump straight to the source code**
[Here is the git repo](https://github.com/cupcakearmy/mnist) for you :)
**Or simply go to the [finished website](https://mnist.nicco.io/)**
Today we will look at how to train a simple MNIST digit recogniser and then export it into a website where we can see it in action. This article will therefore be split into three parts:
1. Training
2. Export & import the pre-trained model into a website
3. Build a simple website where we can use the model.
Also, I am not going to explain what machine learning is, as there are enough guides, videos, podcasts, ... that already do a much better job than I could, and it would be outside the scope of this article.
<figure>
![](images/natasha-connell-byp5TTxUbL0-unsplash-scaled-1.jpg)
<figcaption>
Photo by [Natasha Connell](https://unsplash.com/@natcon773?utm_source=unsplash&utm_medium=referral&utm_content=creditCopyText) on [Unsplash](https://unsplash.com/s/photos/brain?utm_source=unsplash&utm_medium=referral&utm_content=creditCopyText)
</figcaption>
</figure>
So the first thing we need to understand is that we will not train the model in the browser. That is a job for GPUs; the goal here is only to use a pre-trained model inside of the browser. Training is a much more resource-intensive task than simply using the net.
## Training the model
So, the first step is to actually have a model. I will do this in tensorflow 2.0 using the now included keras api. This means Python 🎉
The code below is basically an adapted version of the [keras hello world example](https://keras.io/examples/mnist_cnn/).
If you want to run the code yourself (which you should!) simply head over to [Google Colab](https://colab.research.google.com), create a new file and just paste the code. There you can run it for free on GPUs which is pretty dope!
```
from tensorflow.keras.datasets import mnist
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Dense, Dropout, Flatten
from tensorflow.keras.layers import Conv2D, MaxPooling2D
from tensorflow.keras.utils import to_categorical
(x_train, y_train), (x_test, y_test) = mnist.load_data()
# Reshaping for channels_last (tensorflow) with one channel
size = 28
print(x_train.shape, x_test.shape)
x_train = x_train.reshape(len(x_train), size, size, 1).astype('float32')
x_test = x_test.reshape(len(x_test), size, size, 1).astype('float32')
print(x_train.shape, x_test.shape)
# Normalize
upper = max(x_train.max(), x_test.max())
lower = min(x_train.min(), x_test.min())
print(f'Max: {upper} Min: {lower}')
x_train /= upper
x_test /= upper
total_classes = 10
y_train = to_categorical(y_train, total_classes)
y_test = to_categorical(y_test, total_classes)
# Make the model
model = Sequential()
model.add(Conv2D(64, (3, 3), activation='relu', input_shape=(size,size, 1), data_format='channels_last'))
model.add(Conv2D(32, (3, 3), activation='relu'))
model.add(MaxPooling2D(pool_size=(2, 2)))
model.add(Dropout(0.25))
model.add(Flatten())
model.add(Dense(128, activation='relu'))
model.add(Dropout(0.5))
model.add(Dense(total_classes, activation='softmax'))
model.compile(loss='categorical_crossentropy', optimizer='adam', metrics=['accuracy'])
# Train
model.fit(x_train, y_train,
batch_size=32,
epochs=12,
verbose=True)
score = model.evaluate(x_test, y_test, verbose=0)
print('Test loss:', score[0])
print('Test accuracy:', score[1])
```
We can run this and we will get a pretty good accuracy. The MNIST dataset is not very hard to train.
## Export the model
Now the conventional way to save a model is to use the `model.save("model.h5")` method provided by keras. This uses the h5 file format.
Unfortunately this is not compatible with tensorflow-js. So we need another way.
There is a package called tensorflowjs for python (confusing right? 😅) that provides the functionality we need
```
import tensorflowjs as tfjs
tfjs.converters.save_keras_model(model, './js')
```
It saves the model data inside the `./js` folder, ready to be used.
Inside there you will find a `model.json` that basically describes the structure of the model and something like `group1-shard1of1.bin` that contains the fitted weights.
## Import the model
Now we are ready to import that. First we need to install the `@tensorflow/tfjs` package.
```
import * as tf from '@tensorflow/tfjs';
let model
tf.loadLayersModel('/model.json').then(m => {
model = m
})
```
Ok how do I use that now?
```
const tensor = tf.tensor(new Uint8Array(ourData), [1, 28, 28, 1])
const prediction = model.predict(tensor)
```
**What is happening here?**
In order to predict a value we first need a tensor (vector) of the same shape as the original input we trained the model with. In our case that is 1x28x28x1.
Also we will convert our pixel data into a `Uint8Array`.
## Using the canvas element to draw and predict numbers
I'm not gonna talk about what bundler, etc. I'm using. If you are interested, simply have a look at the [git repo](https://github.com/cupcakearmy/mnist).
First lets write some basic html for the skeleton of our page.
```
<html>
<head>
<style>
* {
box-sizing: border-box;
font-family: monospace;
}
html,
body {
padding: 0;
margin: 0;
height: 100vh;
width: 100vw;
display: flex;
justify-content: center;
align-items: center;
}
body>div {
text-align: center;
}
div canvas {
display: inline-block;
border: 1px solid;
}
div input {
display: inline-block;
margin-top: .5em;
padding: .5em 2em;
background: white;
outline: none;
border: 1px solid;
font-weight: bold;
}
</style>
</head>
<body>
<div>
<h1>MNIST (Pretrained)</h1>
<canvas id="can" width="28" height="28"></canvas>
<br />
<input id="clear" type="button" value="clear">
<br />
<input id="test" type="button" value="test">
<br />
<h2 id="result"></h2>
<a href="https://github.com/cupcakearmy/mnist">
<h3>source code</h3>
</a>
</div>
<script src="./tf.js"></script>
<script src="./canvas.js"></script>
</body>
</html>
```
Next we need some short code for drawing on a canvas.
The code is adapted from [this stackoverflow answer](https://stackoverflow.com/a/8398189) and reduced to only the basics we need.
In essence it's a canvas that listens to our mouse events and fills the pixels with black. Nothing more.
```
/* jslint esversion: 6, asi: true */
var canvas, ctx, flag = false,
prevX = 0,
currX = 0,
prevY = 0,
currY = 0,
dot_flag = false;
var x = "black",
y = 2;
function init() {
canvas = document.getElementById('can');
ctx = canvas.getContext("2d");
w = canvas.width;
h = canvas.height;
canvas.addEventListener("mousemove", function (e) {
findxy('move', e)
}, false);
canvas.addEventListener("mousedown", function (e) {
findxy('down', e)
}, false);
canvas.addEventListener("mouseup", function (e) {
findxy('up', e)
}, false);
canvas.addEventListener("mouseout", function (e) {
findxy('out', e)
}, false);
window.document.getElementById('clear').addEventListener('click', erase)
}
function draw() {
ctx.beginPath();
ctx.moveTo(prevX, prevY);
ctx.lineTo(currX, currY);
ctx.strokeStyle = x;
ctx.lineWidth = y;
ctx.stroke();
ctx.closePath();
}
function erase() {
ctx.clearRect(0, 0, w, h);
}
function findxy(res, e) {
if (res == 'down') {
prevX = currX;
prevY = currY;
currX = e.clientX - canvas.offsetLeft;
currY = e.clientY - canvas.offsetTop;
flag = true;
dot_flag = true;
if (dot_flag) {
ctx.beginPath();
ctx.fillStyle = x;
ctx.fillRect(currX, currY, 2, 2);
ctx.closePath();
dot_flag = false;
}
}
if (res == 'up' || res == "out") {
flag = false;
}
if (res == 'move') {
if (flag) {
prevX = currX;
prevY = currY;
currX = e.clientX - canvas.offsetLeft;
currY = e.clientY - canvas.offsetTop;
draw();
}
}
}
init()
```
And now the glue that puts this together: the piece of code that listens to the "test" button.
```
import * as tf from '@tensorflow/tfjs';
let model
tf.loadLayersModel('/model.json').then(m => {
model = m
})
window.document.getElementById('test').addEventListener('click', async () => {
const canvas = window.document.querySelector('canvas')
const { data, width, height } = canvas.getContext('2d').getImageData(0, 0, 28, 28)
const tensor = tf.tensor(new Uint8Array(data.filter((_, i) => i % 4 === 3)), [1, 28, 28, 1])
const prediction = model.predict(tensor)
const result = await prediction.data()
const guessed = result.indexOf(1)
console.log(guessed)
window.document.querySelector('#result').innerText = guessed
})
```
Here we need to explain a few things.
`canvas.getContext('2d').getImageData(0, 0, 28, 28)` simply returns a flattened array of the pixels from the point (0,0) to (28,28).
Then, instead of simply passing the data to the tensor, we need to do some magic with `data.filter` in order to keep only every fourth value, the alpha channel. This is because our canvas has 3 colour channels + 1 alpha channel, but we only need to know whether a pixel is black or not. We do this by simply filtering for the index mod 4.
```
data.filter((_, i) => i % 4 === 3)
```
Lastly we need to interpret the result. `prediction.data()` returns an array with 10 items, because we trained the model so that there are only 10 possible outcomes. 10 digits, right?
Well, in that case we simply search in which position of the array we have a 1, and that index is our solution.
We search for a 1 because we only have floats from 0 to 1, so 1 is the maximum.
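A slightly safer variant is to pick the index of the largest value instead of looking for an exact 1 (a small sketch reusing the `prediction` from above):
```
// Take the class with the highest probability instead of relying on an exact 1.
const result = Array.from(await prediction.data())
const guessed = result.indexOf(Math.max(...result))
```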
I hope this helped you understand the process better. It was pretty confusing at first for me too 😬

View File

@ -0,0 +1,222 @@
---
title: "How to search in the JAM"
date: "2020-12-06"
categories:
- "coding"
tags:
- "jam-stack"
- "lunr"
- "search"
- "svelte"
---
So a lot of people (me included) are now building JAM stack landing pages, shops, full-stack apps, etc. and while you can of course have a backend, not all of them do. For those who don't: **How do we search?**
There is the obvious [Google Programmable Search Engine](https://programmablesearchengine.google.com/about/), but that looks bad and is not really customizable. The results are very good, it's Google after all. However, for those who want something more custom: here's one way how.
A working example can be found right here [nicco.io/search](https://nicco.io/search) 😉
We will look at the following:
1. How to implement the search
2. Search Accuracy & Precision
3. Performance & Size
<figure>
![Telescope](images/uriel-soberanes-gCeH4z9m7bg-unsplash-991x1024.jpg)
<figcaption>
Photo by [Uriel Soberanes](https://unsplash.com/@soberanes?utm_source=unsplash&utm_medium=referral&utm_content=creditCopyText) on [Unsplash](https://unsplash.com/s/photos/telescope?utm_source=unsplash&utm_medium=referral&utm_content=creditCopyText)
</figcaption>
</figure>
We can't rely on a backend as discussed above, so the magic will happen at build time, like everything in the JAM-verse.
I've decided to go with the free and open source [lunr.js](https://lunrjs.com/) which is a simple but still quite powerful search engine that can run in the client.
```
const idx = lunr(function () {
this.ref('name')
this.field('text')
this.add({ name: 'A', text: 'Lorem...' })
})
const results = idx.search('Lor*')
```
The first question that will probably pop up in your head is: "How will lunr be able to know what is on our website?" Here is where our work begins.
## Roadmap
1. Aggregate all the data of your site
2. Prebuild the index and make it available as static JSON
3. Load `lunr.js` into your site and start searching
## Preparing the Index
I'm using [Sapper](https://sapper.svelte.dev/) for this blog, so the examples will be based on it, but the same principle applies to all JAM tech.
First we need to aggregate all our data. In my case this means all the single pages, blog entries, projects and works. So I created a `/src/routes/search.json.js` file and got to work.
```
import lunr from 'lunr'
import { getAll } from '../lib/wp'
function removeHTML(s) {
return s.replace(/<.*?>|\s+|&#\d+;/g, ' ').trim()
}
async function convertForIdx(type, fields = []) {
// Load the data from Wordpress
const items = await getAll(type)
// Map only the fields we need and are relevant
const defaults = ['title', 'content', 'slug']
return items.map((item) => ({
url: `${item.type}/${item.slug}`,
data: [...defaults, ...fields].map((field) => removeHTML(item[field])).join(' '),
}))
}
export async function get(req, res) {
const all = await Promise.all([
convertForIdx('projects', ['description']),
convertForIdx('pages'),
convertForIdx('posts'),
convertForIdx('works', ['role']),
])
const idx = lunr(function () {
this.ref('url')
this.field('data')
all.flat().forEach((doc) => this.add(doc))
})
res.setHeader('Content-Type', 'application/json')
res.end(JSON.stringify(idx))
}
```
First I get all the data from the Wordpress backend, and for each item I select at least the `title` and `content`, as I want them to be searchable. Then we remove any HTML tags with a dirty regexp and finally we build the index.
When we call `JSON.stringify(idx)` the precomputed index is serialized to JSON. Otherwise every client would have to compute it on their own CPU, wasting cycles and possibly battery. We don't want that.
Now I have the "search model" ready. You can have a look: [nicco.io/search.json](https://nicco.io/search.json)
## Integrating the search
It's time to integrate the search into the actual website 🚀
```
<script context="module">
export async function preload() {
const prebuilt = await this.fetch(`/search.json`).then((res) => res.json())
return { prebuilt }
}
</script>
<script>
import lunr from 'lunr'
import SearchResult from '../components/SearchResult.svelte'
export let prebuilt
let needle
let results = []
async function search(needle) {
if (!needle || !idx) {
results = []
} else {
let found = idx.search(needle + '~1')
if (!found.length) found = idx.search(needle + '*')
results = found.slice(0, 20)
}
}
$: idx = lunr.Index.load(prebuilt)
$: search(needle)
</script>
<input bind:value={needle} placeholder="needle" />
<ul>
{#each results as result (result.ref)}
<SearchResult {result} />
{/each}
</ul>
```
The first thing we do is load our prebuilt `/search.json` and load it into an instance of `lunr`. This only needs to happen once; once the index is loaded we are ready to go.
```
const idx = lunr.Index.load(prebuilt)
```
For the searching itself, `lunr` has quite a [few options](https://lunrjs.com/guides/searching.html). The most relevant for me were the wildcard and fuzzy search. While the wildcard is good for when we haven't completed a word yet, fuzzy search helps us with typos.
```
const fuzzy = idx.search(needle + '~1') // foo~1
```
While not explicitly said in the docs I'm guessing they use the [Levenshtein Distance](https://en.wikipedia.org/wiki/Levenshtein_distance), which means `~1` will replace at most 1 char.
```
const wildcard = idx.search(needle + '*') // fo*
```
Wildcards are straightforward. `lunr` supports any kind: `*oo`, `f*o` and `fo*`.
The result is an array of matches, each with a `ref` field so you can find the related item, and a `score`. They are already sorted by score, so basically you just need to write a for loop.
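In code that could look roughly like this (the `pages` lookup is a hypothetical map from the `url` ref back to our own documents):
```
// Sketch: map lunr results (already sorted by score) back to our documents.
const pages: Record<string, { title: string; url: string }> = {} // hypothetical lookup built alongside the index
const found = idx.search(needle + '*')
for (const { ref, score } of found) {
  const page = pages[ref]
  console.log(score.toFixed(2), page ? page.title : ref)
}
```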
## Search Quality
Now, the accuracy and precision are of course not on par with Google, but they are more than good enough for a blog or a smaller site. And in about an hour you can add search to your JAM site without much work, and you stay Google-free.
This approach also gives you all the artistic liberties over the design.
## Performance & Size
Since we are prebuilding and packaging the whole site into one big `JSON` file it's worth taking a look at the size of the index.
For this I took the [Iliad by Homer](https://gutenberg.org/ebooks/6130) and split it up into different numbers of pieces to simulate the number of pages. At the same time, the more pieces, the smaller the content of a single "page".
Please note that it's ~1 MB of plain text, so it's quite a lot.
You can get the source code for the "test" [here](https://gist.github.com/cupcakearmy/242b54ee6b1a914896390c91846aa4d4).
### Variable size documents
<figure>
![](https://api.nicco.io/wp-content/uploads/2020/12/Lunr-Index-Size-Compresion.svg)
<figcaption>
Graph of Lunr Index size
</figcaption>
</figure>
As you can see, with `1000` documents of around `1.15k` each, we end up with a compressed size of `563 KiB`, which starts to get big.
### A more real example
Here is an example where each document is around `10k` in size. Roughly double the text amount needed for this blog post. Then we add an ever increasing amount of documents to the index and watch it grow.
<figure>
![](https://api.nicco.io/wp-content/uploads/2020/12/Lunr.js-Index-Size-10k-Document-Size.svg)
<figcaption>
Chart of 10k sized documents building the index.
</figcaption>
</figure>
The results are very different of course. Please note that the second graph has a logarithmic scale! If we compare the compressed size at `500` documents we have `494kb @ 2.3k/doc` vs `1.09MiB @ 10k/doc`. Basically double, which is not that bad if we consider that the documents are around 5 times bigger.
Hope you enjoyed and learned something, take care ❤️

View File

@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:54ef9ac3fb65009c53d55f103eadb1f6c1b9ac9cb93019f161d5c0d842576e61
size 191614

View File

@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:ee60b8040150d154afd21faf3112421b9b43cacb0626a5b4c53c0590c69626a0
size 151085

View File

@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:01d67cb02737ac3cbfa236c4fed9fa4fc053a495a1d64d3d3aa44a751d1ca73e
size 80573

View File

@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:767f3552e0f0244b006c0bcb5a8d3c39b2fb83d1eb008cd5abb6d8c8012c11da
size 182432

View File

@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:f43dfc65d7f710d7cfe7f21d168c5142c450b95b363e23f340d31135676bedc4
size 33825

View File

@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:1e65961b86eb1febbe48eb9292c9a54eebdb61314a7b1223d7c9112423477dcf
size 175459

View File

@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:e83975d7ec576ef6ca6c4fc7a007fd82d8066cdfc42ee097527f989ccf025756
size 2027604

View File

@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:9f831b45aa91bc87f6882e64a702a8a3b6224a1f6a4bf33f8c21e46053409eed
size 68828

View File

@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:f1dfeb6c5d59b355b38a50a0888b178e9fa9fa93bd0d26dd94ffd87ecbc0fae3
size 136338

View File

@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:a4a5b54dfff9307f3460189e6e80eedd6c80ee64eb3ae25bf5c7df3c3cccf882
size 56274

View File

@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:b0f241b1da4e817d415f8984ce54830276b0631042b31815a5569dd6937eb2ad
size 258865

View File

@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:2b5d821660053d05743eb6439d652e77a3d4699a45884a9bce88169835c55c8a
size 564881

View File

@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:5db61551895b9969afdb1356560f03c1918faca5e68169b6ac2d200ce700ce90
size 73938

View File

@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:88c62af32b444a6928c85e8f31cad2f4bef8ebed33893b0a28894d6f87593599
size 42234

View File

@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:eb058e043414fe65d83876644ed84a7e2e336ad8b1b4d771062db8494c036e79
size 101970

View File

@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:2ca0617aa7989fc9540df159acfe986d2aff6d8d6060f56747ed983b5d5be5fb
size 29477

View File

@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:b70f8cacc7eaa9ec3127c96cf3d3fecddda61a35313b8fd6f42622d1f2983858
size 50254

View File

@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:b70f8cacc7eaa9ec3127c96cf3d3fecddda61a35313b8fd6f42622d1f2983858
size 50254

View File

@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:641d3e8a3529e6e026773d16bc105b71a0ae27d8361faada37d52551ef198a0f
size 688323

View File

@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:e6b4a543a812755c5b899512478ce01fba1baea6502cffec3f9f46dc924fab30
size 178827

View File

@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:ef1c569cdcf9851539f06176d0c9e0debc52875976ff330b1cc6971572fe1404
size 665199

View File

@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:18e276ce5aa34268120c268de0f098dc07b0c0bb095d39479d3341af472acf7f
size 180270

View File

@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:641e531b6383c8552a05586e2ac44c53718c0b6130d72f23dcb8edf74ee4b3dd
size 794403

View File

@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:9621fd53443f20b7811c86005dc1c9fc4858dddd4a04c4b9df977592b1a58f04
size 67338

View File

@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:2f1402d168936c5ef8b97bc4f30afe3d4bae64136feb8d85b8e85bc15d3810d3
size 287558

View File

@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:5794d60199ad3f80242f5291b56e98d5d8278bde2d0ed11e3c001017140f0d6e
size 30457

View File

@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:40588829e34a45f71777c36e1dfdc9078bdf88d3196051034d20249d3a0ea539
size 104152

View File

@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:f0fa3d4bb042056f6e9adfdf0829f603b4b5771c7a724b862fe275b57924ca23
size 98883

View File

@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:5e7b72e16d3f5bcd368eb3b39078e32d0ef904023b7984f32fa2d0e364109479
size 43477

View File

@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:48603b5e68e25cce1593bf8578f5cc882c1612c213d0c3041312f018087a9a78
size 191775

View File

@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:3a3ed4f673a36c253f694b9590052125c52f2dfdecfa4cce27df5fe33b7f380d
size 84760

View File

@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:ebc09fc3389fdd05f9d559692c3d24eb7bd99adba36cabfdcf707975670c2474
size 483287

View File

@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:7856b748cc26c928c9fd5d43af23d43f72a6f11bf277685ef4b63ad8ada0528e
size 495251

View File

@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:517f9417837a532be4d3830cdd1e5857582ff85299bd14ab6f8aa52278aed3bd
size 62672

View File

@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:366666945890876b458c89ddfb94d4bab8e02f686a08b1d3bc04f0ed0e03e1cc
size 272668

View File

@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:8cf645981a817db0c7c5e92723d1c1104390a12dc4f68fa8bb82127e98352020
size 392117

View File

@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:a3f3295dd8580c51c510f34b5c5252c56d77be75d1a93ce83583c6a4308e636f
size 4596995

View File

@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:a326b48b5b9355bdef2a43fe5298559d45ad645f911519b9e398f981a1390e80
size 318810

View File

@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:1be1853bbb1056dd892fec171870e7bbe6108c4eab7309d13d594dd9b9c5ec28
size 383858

View File

@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:418f648ed44631401d9b4887c951b1db562cb02d981258f410768284dccd4945
size 160637

View File

@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:1be1853bbb1056dd892fec171870e7bbe6108c4eab7309d13d594dd9b9c5ec28
size 383858

View File

@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:394cb4b320efe0eaafc527569f2e04e57f25f6ca7ad70a6cc951af981012be2a
size 587956

View File

@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:821eb3943493f35fbdb6881c48ab8c21870e1c7a11f11321e1bb7ee7ec0b6e16
size 249493

View File

@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:7b41d27d824c56267c2cb666d906c1bede2fdaa1de74f9957e26d502c50cbc2b
size 1115156

View File

@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:39057dd4352501b6a8ad5aa62f28662cc0b1310fbbf03c5ad3ee67d0ca3abb9a
size 143009

View File

@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:8aaebdf6ec81ea65d61c9db30a93e0e6da65c06b874a614393d260ef135c3ee9
size 170032

View File

@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:1e8724656a1c04e896de528cc52d2418de73b5dbba1eb94062c826a212b18f19
size 205615

View File

@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:728917127520026b290af0348ef70dee6d36dbd3ec3f72b76d3c2b1fca3bdfdb
size 1205666

View File

@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:24f5c8a7e2fce080c7f9090f9db4f8ae5d2644f08eba8671da802bd76880e679
size 523148

View File

@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:3a7c6f461682a6a5d127655c2d67b90e1a1293c3fbe9bca763db8ab855e1c534
size 907098

View File

@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:eef5e387e8c930d6eff083bc6b460613253d1d808147515a0430bdbffac3ccfe
size 56082

View File

@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:82a2dfcd574fef01451a2e62c5efa1975445682c70588bca4e06f1421be4c24d
size 225187

View File

@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:1be1853bbb1056dd892fec171870e7bbe6108c4eab7309d13d594dd9b9c5ec28
size 383858

View File

@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:16f73c9118460e939f16070572407482c45a885a5f28163d3fe21a1ae63efc4f
size 547290

View File

@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:a0e78000dcffc29f4e60a546ff72c0b038165b6bb6ba6c2a75fc036628bd50a6
size 75268

View File

@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:bdd3909cf6516e603c072d23f1ba8a9394dc29cfdaff787c0a6ecd3ce0083c99
size 351535

View File

@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:864ac4e4da80d30f45124e0f377e2453b843017f8442f8b2858f644d3076f1d6
size 110652

View File

@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:012f5cffade95b8952c6da835c5ec9badacce4ab222b8cf6a59ce2d62fb2fd4a
size 645501

View File

@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:031b2833eaca905035d9bcee4462ab92d6388ccbeffde7ef775d792231726fbf
size 40933

View File

@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:500b324d99022e3410afa467743fff186a85700462e66487c11f5b7c3722e9d1
size 244788

View File

@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:846aa0ef3c0ad69b286514962c0cf1b9504f0fc74e975f592cfbe79f06ed5076
size 40291

View File

@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:08eb7d31540d64f49de77452565c48015bc4da3e5753ba9ed93ad19e70eaef48
size 190710

View File

@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:f77858db14fe890542ef1c18b112f144f4bdf5a1f9009cdcfa0f9f80eab70c55
size 19395

Some files were not shown because too many files have changed in this diff.