Upgrade to Next 15; add many new pages, authentication, CI, and a database for images; plus extensive cleanup

This commit is contained in:
2024-10-12 00:35:10 +01:00
parent 7f88af8ee3
commit 81d2cae9c7
42 changed files with 6511 additions and 1440 deletions

6
.env
View File

@@ -1,6 +0,0 @@
NEXTAUTH_SECRET=v7cl92wK4Qdrdr1Jrr0JVl1qna4rIxFsY+T7X+8w0wM=
NEXTAUTH_URL=https://3000.vscode.home.joemonk.co.uk
NEXT_COGNITO_CLIENT_ID=6lt3p9f2puu583pso84b9b1asu
NEXT_COGNITO_CLIENT_SECRET=7mv13jembkcimd2qavlsqci5kgtihqhcvned5r54b46qpusl8o4
NEXT_COGNITO_ISSUER=https://cognito-idp.eu-west-2.amazonaws.com/eu-west-2_qwrNOlWgg

View File

@@ -0,0 +1,30 @@
name: Build and deploy
run-name: Build and deploy
on:
  push:
    # branches:
    #   - main
jobs:
  deploy:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v4
      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3
      - name: Login to private registry
        uses: docker/login-action@v3
        with:
          # BUG FIX: the configuration-variables context is `vars`, not
          # `variables` — `${{ variables.* }}` is not a recognised context and
          # expands to an empty string, so login/push targeted a blank registry.
          registry: '${{ vars.server_url }}/${{ vars.repository }}'
          username: ${{ secrets.REGISTRY_USERNAME }}
          password: ${{ secrets.REGISTRY_PASSWORD }}
      - name: Build and push
        uses: docker/build-push-action@v6
        with:
          push: true
          tags: '${{ vars.server_url }}/${{ vars.repository }}:latest'

32
.vscode/launch.json vendored Normal file
View File

@@ -0,0 +1,32 @@
{
"version": "0.2.0",
"configurations": [
{
"name": "Next.js: debug server-side",
"type": "node-terminal",
"request": "launch",
"command": "npm run dev"
},
{
"name": "Next.js: debug client-side",
"type": "chrome",
"request": "launch",
"url": "https://3000.vscode.home.joemonk.co.uk/"
},
{
"name": "Next.js: debug full stack",
"type": "node",
"request": "launch",
"program": "${workspaceFolder}/node_modules/.bin/next",
"runtimeArgs": ["--inspect"],
"skipFiles": ["<node_internals>/**"],
"serverReadyAction": {
"action": "debugWithEdge",
"killOnServerStop": true,
"pattern": "- Local:.+(https?://.+)",
"uriFormat": "%s",
"webRoot": "${workspaceFolder}"
}
}
]
}

View File

@@ -1,3 +1,6 @@
{
"editor.tabSize": 2,
"yaml.schemas": {
"https://json.schemastore.org/github-workflow.json": "file:///workspace/next-portfolio/.gitea/workflows/deploy.yaml"
},
}

View File

@@ -1,4 +1,4 @@
FROM node:18-alpine AS base
FROM node:22-alpine AS base
# Install dependencies only when needed
FROM base AS deps

BIN
db.sql Normal file

Binary file not shown.

View File

@@ -1,20 +1,33 @@
import million from "million/compiler";
import createMDX from '@next/mdx'
import createMDX from "@next/mdx";
/** @type {import('next').NextConfig} */
const nextConfig = {
pageExtensions: ['js', 'jsx', 'md', 'mdx', 'ts', 'tsx'],
swcMinify: true,
pageExtensions: ["js", "jsx", "md", "mdx", "ts", "tsx"],
experimental: {
reactCompiler: true,
ppr: true,
},
serverExternalPackages: ["typeorm"],
reactStrictMode: true,
output: "standalone",
images: {
remotePatterns: [
{
protocol: "https",
hostname: "fly.storage.tigris.dev"
},
],
},
};
const millionConfig = {
auto: { rsc: true }, rsc: true
}
auto: { rsc: true },
rsc: true,
};
const withMDX = createMDX({
// Add markdown plugins here, as desired
})
});
export default withMDX(million.next(nextConfig, millionConfig));

6854
package-lock.json generated

File diff suppressed because it is too large Load Diff

View File

@@ -11,36 +11,41 @@
"lint:fix": "next lint -- --fix"
},
"dependencies": {
"@heroicons/react": "^2.1.3",
"@aws-sdk/client-s3": "^3.663.0",
"@heroicons/react": "^2.1.5",
"@mdx-js/loader": "^3.0.1",
"@mdx-js/react": "^3.0.1",
"@next/bundle-analyzer": "^14.2.2",
"@next/mdx": "^14.2.3",
"@tailwindcss/typography": "^0.5.12",
"@next/bundle-analyzer": "^14.2.13",
"@next/mdx": "^14.2.13",
"@tailwindcss/typography": "^0.5.15",
"@types/mdx": "^2.0.13",
"@types/node": "^20.12.7",
"@types/react": "^18.2.79",
"@types/react-dom": "^18.2.25",
"@typescript-eslint/eslint-plugin": "^7.7.1",
"autoprefixer": "^10.4.19",
"eslint": "^8.57.0",
"eslint-config-next": "14.2.5",
"@types/node": "^22.6.1",
"@types/react": "^18.3.9",
"@types/react-dom": "^18.3.0",
"@typescript-eslint/eslint-plugin": "^8.7.0",
"autoprefixer": "^10.4.20",
"babel-plugin-react-compiler": "^0.0.0-experimental-6067d4e-20240924",
"better-sqlite3": "^9.6.0",
"eslint": "^9.11.1",
"eslint-config-next": "^15.0.0-rc.0",
"exif-reader": "^2.0.1",
"framer-motion": "^11.1.7",
"glob": "^10.4.5",
"million": "^3.0.6",
"next": "^14.2.2",
"next-auth": "^4.24.7",
"postcss": "^8.4.38",
"framer-motion": "^11.5.6",
"glob": "^11.0.0",
"million": "^3.1.11",
"next": "^15.0.0-rc.0",
"next-auth": "^5.0.0-beta",
"postcss": "^8.4.47",
"radash": "^12.1.0",
"react": "^18.2.0",
"react-dom": "^18.2.0",
"react-zoom-pan-pinch": "^3.4.4",
"react": "^19.0.0-rc-04bd67a4-20240924",
"react-dom": "^19.0.0-rc-04bd67a4-20240924",
"react-zoom-pan-pinch": "^3.6.1",
"reflect-metadata": "^0.2.2",
"server-only": "^0.0.1",
"sharp": "^0.33.3",
"sharp": "^0.33.5",
"tailwind-scrollbar": "^3.1.0",
"tailwindcss": "^3.4.3",
"typescript": "^5.4.5",
"yet-another-react-lightbox": "^3.17.4"
"tailwindcss": "^3.4.13",
"typeorm": "^0.3.20",
"typescript": "^5.6.2",
"yet-another-react-lightbox": "^3.21.6"
}
}

View File

@@ -0,0 +1,23 @@
import { signIn } from "@/lib/auth"

/**
 * Sign-in page: a single-button form whose server action starts the
 * "authelia" provider flow. The optional ?callbackUrl= query parameter is
 * forwarded as `redirectTo` so the user lands back where they started
 * after authenticating.
 */
export default function Auth(props: {
  searchParams: { callbackUrl: string | undefined }
}) {
  // Inline server action — executes on the server on form submission.
  const startAutheliaSignIn = async (): Promise<void> => {
    "use server"
    await signIn("authelia", {
      redirectTo: props.searchParams?.callbackUrl ?? "",
    })
  }

  return (
    <form className="w-40 mx-auto" action={startAutheliaSignIn}>
      <button
        type="submit"
        className={`rounded-lg dark:bg-dracula-bg-light transition-colors duration-100 dark:text-white px-2 py-2 font-normal border-transparent`}
      >
        <span>Sign in with Authelia</span>
      </button>
    </form>
  )
}

View File

@@ -1,9 +1,11 @@
import "../globals.css";
import { SessionProvider } from "next-auth/react";
import NavBar from '@/components/navbar';
import Footer from '@/components/footer';
import LogIn from "@/components/auth/login";
import "../globals.css";
export default function RootLayout({
children,
}: Readonly<{
@@ -11,9 +13,11 @@ export default function RootLayout({
}>): React.JSX.Element {
return (
<>
<NavBar LogIn={<LogIn/>}/>
<main className="px-6 py-4 w-full mx-auto flex-1 align-middle lg:max-w-5xl">
<NavBar/>
<main className="px-6 py-4 w-full flex-1 align-middle overflow-y-scroll scrollbar scrollbar-thumb-dracula-purple scrollbar-track-dracula-bg-light">
<div className="mx-auto w-full align-middle lg:max-w-5xl ">
{children}
</div>
</main>
<Footer/>
</>

View File

@@ -4,7 +4,6 @@ import { type GetPhotos } from "@/app/api/photos/route";
async function getImageData(): Promise<GetPhotos> {
const res = await fetch(`http://localhost:3000/api/photos`, { next: { revalidate: false, tags: ['photos'] } });
console.log(res);
return res.json() as Promise<GetPhotos>;
}

View File

@@ -1,32 +1,24 @@
import { glob } from "glob";
import dynamic from "next/dynamic";
// type postMdx = {
// metadata: {
// title: string,
// date: string,
// coverImage: string,
// blurb: string,
// shortBlurb: string,
// tags: string[]
// }
// }
export const dynamicParams = false;
export async function generateStaticParams(): Promise<{slug: string[]}[]> {
const posts = await glob(`src/markdown/posts/[...slug]/**/*.mdx`, {
const posts = await glob(`${process.cwd()}/src/markdown/posts/[[]...slug[]]/**/*.mdx`, {
nodir: true,
});
const slugs = posts.map((post) => ({
slug: post.replace('src/markdown/posts/[...slug]/', '').replace(/\.mdx$/, '').split('/')
slug: [post.split('/').at(-1)!.slice(0, -4)]
}));
return slugs;
}
export default function Post({params}: {params: { slug: string[] }}): React.JSX.Element {
export default async function Post({params}: {params: { slug: string[] }}): Promise<React.JSX.Element> {
const mdxFile = await import(`../../../../markdown/posts/[...slug]/${params.slug.join('/')}.mdx`)
const Post = dynamic(async () => mdxFile);
return (
<>
{params.slug}
</>
<Post/>
);
}

View File

@@ -1,7 +1,73 @@
export default function Posts(): React.JSX.Element {
import { glob } from "glob";
import { getCurrentUrl } from "@/lib/current-url";
import { unstable_cache } from "next/cache";
import Link from "next/link";
type postDetails = {
link: string,
metadata: {
title: string,
date: string,
coverImage: string,
blurb: string,
shortBlurb: string,
tags: string[]
}
}
async function loadPostDetails(): Promise<postDetails[]> {
const posts = await glob(`${process.cwd()}/src/markdown/posts/[[]...slug[]]/**/*.mdx`, {
nodir: true,
});
const loadPostData = posts.map(async (post) => {
const slug = [post.split('/').at(-1)!.slice(0, -4)]
const mdxFile = await import(`../../../../src/markdown/posts/[...slug]/${slug.join('/')}.mdx`)
return {
metadata: mdxFile.metadata,
link: getCurrentUrl() + '/posts/' + slug.join('/')
}
});
const postData = await Promise.all(loadPostData);
return postData;
}
const getPosts = unstable_cache(
loadPostDetails,
['posts'],
{
revalidate: false
}
)
export default async function Posts(): Promise<React.JSX.Element> {
const postDetails = await getPosts();
return (
<>
Actually this should be custom
</>
<div className="flex flex-col gap-6">
{postDetails.map((post) => {
return (
<div key={post.link}>
<div className="prose dark:prose-invert mx-auto">
<h2>
<Link href={post.link}>{post.metadata.title}</Link>
</h2>
<div className="flex flex-row">
{post.metadata.tags.map((tag) => {
return (
<div key={`${post.link}_${tag}`}>
<span className="select-none text-sm me-2 px-2.5 py-1 rounded border border-dracula-pink dark:bg-dracula-bg-darker dark:text-dracula-pink">{tag}</span>
</div>
)
})}
</div>
<p>
{post.metadata.blurb}
</p>
</div>
</div>
)
})}
</div>
);
}

View File

@@ -1,7 +1,22 @@
import NextAuth from "next-auth";
import { authConfig } from "@/lib/auth";
import { NextRequest } from "next/server";
import { handlers } from "@/lib/auth";
// eslint-disable-next-line @typescript-eslint/no-unsafe-assignment
const handler = NextAuth(authConfig);
const reqWithTrustedOrigin = (req: NextRequest): NextRequest => {
const proto = req.headers.get('x-forwarded-proto')
const host = req.headers.get('x-forwarded-host')
if (!proto || !host) {
console.warn("Missing x-forwarded-proto or x-forwarded-host headers.")
return req
}
const envOrigin = `${proto}://${host}`
const { href, origin } = req.nextUrl
return new NextRequest(href.replace(origin, envOrigin), req)
}
export { handler as GET, handler as POST };
export const GET = (req: NextRequest) => {
return handlers.GET(reqWithTrustedOrigin(req))
}
export const POST = (req: NextRequest) => {
return handlers.POST(reqWithTrustedOrigin(req))
}

View File

@@ -1,8 +1,7 @@
import exifReader from "exif-reader";
import { glob } from "glob";
import { NextResponse } from "next/server";
import { pick } from "radash";
import sharp from "sharp";
import { shake } from "radash";
import PhotoDataSource from "@/data-source";
import { Photo } from "@/entity/photo";
export type ImageData = {
width: number,
@@ -17,7 +16,9 @@ export type ImageData = {
FocalLength: number,
DateTimeOriginal: Date,
LensModel: string
}>
}>,
title?: string,
description?: string
}
export type GetPhotos = {
@@ -28,28 +29,28 @@ export type GetPhotos = {
}
export async function GET(): Promise<Response> {
const photosGlob = await glob(`public/photos/**/*.{png,jpeg,jpg}`, {
nodir: true,
});
const imageData = photosGlob.map(async (fileName: string) => {
const { width, height, exif } = await sharp(fileName).metadata();
const blur = await sharp(fileName)
.resize({ width: 12, height: 12, fit: 'inside' })
.toBuffer();
const exifData = exif ? exifReader(exif) : undefined;
const dataSource = await PhotoDataSource.dataSource;
const photoRepository = dataSource.getRepository(Photo);
const currentSources = await photoRepository.find();
const images = currentSources.map((photo) => {
return {
width: width ?? 10,
height: height ?? 10,
blur: `data:image/jpeg;base64,${blur.toString('base64')}` as `data:image/${string}`,
src: fileName.slice(6),
camera: exifData?.Image?.Model,
exif: pick(exifData?.Photo ?? {}, ['ExposureBiasValue', 'FNumber', 'ISOSpeedRatings', 'FocalLength', 'DateTimeOriginal', 'LensModel'])
};
});
const images = await Promise.all(imageData);
width: photo.width,
height: photo.height,
blur: photo.blur as `data:image/${string}`,
src: photo.src,
camera: photo.camera ?? undefined,
exif: shake({
ExposureBiasValue: photo.exposureBiasValue,
FNumber: photo.fNumber,
ISOSpeedRatings: photo.isoSpeedRatings,
FocalLength: photo.focalLength,
DateTimeOriginal: photo.dateTimeOriginal,
LensModel: photo.lensModel
}),
title: photo.title ?? undefined,
description: photo.description ?? undefined
}
})
return NextResponse.json<GetPhotos>({ status: 200, data: { images } });
}

View File

@@ -0,0 +1,86 @@
import { S3Client, ListObjectsV2Command, GetObjectCommand } from "@aws-sdk/client-s3";
import exifReader from "exif-reader";
import { NextResponse } from "next/server";
import { diff, sift } from "radash";
import sharp from "sharp";
import PhotoDataSource from "@/data-source";
import { Photo } from "@/entity/photo";
import { auth } from "@/lib/auth";

export type GetPhotosUpdate = {
  status: number,
  s3Photos: string[]
}

// Single source of truth for the bucket name and its public URL prefix
// (previously duplicated in four places below).
const BUCKET = "joemonk-photos";
const PUBLIC_URL_PREFIX = `https://fly.storage.tigris.dev/${BUCKET}/`;

/**
 * Syncs the photo database with the S3 bucket.
 *
 * Lists every object in the bucket, diffs against the photo rows already
 * stored, downloads only the new objects, extracts dimensions / EXIF / a
 * base64 blur placeholder with sharp, and persists the new rows.
 * Requires an authenticated session (401 otherwise).
 */
export const GET = auth(async function GET(req): Promise<Response> {
  if (!req.auth) {
    return NextResponse.json({ message: "Not authenticated" }, { status: 401 })
  }
  const dataSource = await PhotoDataSource.dataSource;
  const photoRepository = dataSource.getRepository(Photo);
  // Only the src column is needed to diff against the bucket listing.
  const currentSources = (await photoRepository.find({
    select: {
      src: true
    }
  })).map((photo) => photo.src);
  // Region/credentials are taken from the environment (AWS_* vars) — TODO confirm.
  const s3Client = new S3Client();
  const listObjCmd = new ListObjectsV2Command({
    Bucket: BUCKET
  });
  const s3Res = await s3Client.send(listObjCmd);
  if (!s3Res.Contents) {
    // BUG FIX: the HTTP status must be passed in the ResponseInit argument;
    // `NextResponse.json({ status: 500 })` alone sends HTTP 200 with a
    // body of { status: 500 }. Body kept for backward compatibility.
    return NextResponse.json({ status: 500 }, { status: 500 })
  }
  // Skip "directory" placeholder keys (ending in '/'); sift drops the nulls.
  const s3Photos = sift(s3Res.Contents.map((obj) => {
    if (!obj.Key?.endsWith('/')) {
      return `${PUBLIC_URL_PREFIX}${obj.Key}`;
    } else {
      return null;
    }
  }));
  // Public URLs present in S3 but not yet in the database.
  const newPhotos = diff(s3Photos, currentSources);
  // Download + analyse all new photos concurrently.
  const imageData = newPhotos.map(async (fileName: string) => {
    const getImageCmd = new GetObjectCommand({
      Bucket: BUCKET,
      Key: fileName.replace(PUBLIC_URL_PREFIX, "")
    })
    const imgRes = await s3Client.send(getImageCmd);
    const image = await imgRes.Body?.transformToByteArray();
    if (!image) {
      // Fail loudly here instead of letting sharp(undefined) throw a
      // cryptic "input file is missing" error further down.
      throw new Error(`Empty S3 response body for ${fileName}`);
    }
    const { width, height, exif } = await sharp(image).metadata();
    // 12x12 thumbnail used as a base64 blur placeholder.
    const blur = await sharp(image)
      .resize({ width: 12, height: 12, fit: 'inside' })
      .toBuffer();
    const exifData = exif ? exifReader(exif) : undefined;
    const photo = new Photo();
    photo.src = fileName;
    photo.width = width ?? 10;
    photo.height = height ?? 10;
    photo.blur = `data:image/jpeg;base64,${blur.toString('base64')}` as `data:image/${string}`;
    photo.camera = exifData?.Image?.Model ?? null;
    photo.exposureBiasValue = exifData?.Photo?.ExposureBiasValue ?? null;
    photo.fNumber = exifData?.Photo?.FNumber ?? null;
    photo.isoSpeedRatings = exifData?.Photo?.ISOSpeedRatings ?? null;
    photo.focalLength = exifData?.Photo?.FocalLength ?? null;
    photo.dateTimeOriginal = exifData?.Photo?.DateTimeOriginal ?? null;
    photo.lensModel = exifData?.Photo?.LensModel ?? null;
    return photo;
  });
  const images = await Promise.all(imageData);
  await photoRepository.save(images);
  return NextResponse.json<GetPhotosUpdate>({ status: 200, s3Photos: newPhotos });
});

View File

@@ -1,15 +0,0 @@
import { auth } from '@/lib/auth';
import { revalidateTag } from 'next/cache';
import { draftMode } from 'next/headers';
import { redirect } from 'next/navigation';
export async function GET(): Promise<Response> {
const session = await auth();
if (session) {
draftMode().enable();
revalidateTag('datocms');
} else if (draftMode().isEnabled) {
draftMode().disable();
}
redirect('/');
}

View File

@@ -1,3 +1,4 @@
import "reflect-metadata";
import type { Metadata } from "next";
import { Inter } from "next/font/google";
import "./globals.css";
@@ -8,8 +9,8 @@ const inter = Inter({
});
export const metadata: Metadata = {
title: "Create Next App",
description: "Generated by create next app",
title: "Joe Monk",
description: "A portfolio page showing some of the things I've done",
};
export default function RootLayout({
@@ -24,15 +25,15 @@ export default function RootLayout({
<script id="SetTheme"
dangerouslySetInnerHTML={{
__html: `
if (localStorage.theme === 'dark' || (!('theme' in localStorage) && window.matchMedia('(prefers-color-scheme: dark)').matches)) {
document.documentElement.classList.add('dark');
} else {
if (localStorage.theme !== 'dark' || (!('theme' in localStorage) && !window.matchMedia('(prefers-color-scheme: dark)').matches)) {
document.documentElement.classList.remove('dark');
} else {
document.documentElement.classList.add('dark');
}`,
}}>
</script>
</head>
<body className="min-h-screen flex flex-col bg-dracula-bg-lightest dark:bg-dracula-bg print:white">
<body className="min-h-screen flex flex-col bg-dracula-bg-lightest dark:bg-dracula-bg print:white max-h-screen">
{children}
</body>
</html>

View File

@@ -1,19 +1,29 @@
import { auth } from "@/lib/auth";
import { LoginButton } from "./login_button";
import { LogoutButton } from "./logout_button";
import { auth, signIn, signOut } from "@/lib/auth";
import { getCurrentUrl } from "@/lib/current-url";
import UserCircleIcon from "@heroicons/react/24/outline/UserCircleIcon";
/**
* This is a server component, then the buttons are client side
*/
export default async function LogIn(): Promise<React.JSX.Element | undefined> {
const session = await auth();
if (session) {
return (
<LogoutButton/>
);
<form
action={async () => {
"use server"
if (session?.user) {
await signOut({
redirectTo: `${getCurrentUrl()}/`
})
} else {
return (
<LoginButton/>
await signIn("authelia")
}
}}
>
<button type="submit" className="p-1 dark:hover:bg-dracula-bg-light rounded-3xl transition-colors group">
<UserCircleIcon className={`h-8 w-auto transition-colors ${
session?.user ? "dark:stroke-dracula-red dark:group-hover:stroke-dracula-green" : "dark:stroke-dracula-cyan dark:group-hover:stroke-dracula-orange"
}`}/>
<span className="sr-only">{session?.user ? "Log out" : "Log in"}</span>
</button>
</form>
);
}
}

View File

@@ -1,12 +0,0 @@
"use client";
import { UserCircleIcon } from '@heroicons/react/24/outline';
import { signIn } from "next-auth/react";
export function LoginButton(): React.JSX.Element {
return (
<button className="p-1 dark:hover:bg-dracula-bglight rounded-3xl transition-colors group" onClick={() => void signIn('cognito')}>
<UserCircleIcon className='dark:stroke-dracula-cyan h-8 w-auto dark:group-hover:stroke-dracula-orange transition-colors'/>
<span className="sr-only">Log in</span>
</button>
);
}

View File

@@ -1,12 +0,0 @@
"use client";
import { UserCircleIcon } from '@heroicons/react/24/outline';
import { signOut } from "next-auth/react";
export function LogoutButton(): React.JSX.Element {
return (
<button className="p-1 dark:hover:bg-dracula-bglight rounded-3xl transition-colors group" onClick={() => void signOut()}>
<UserCircleIcon className='dark:stroke-dracula-cyan h-8 w-auto dark:group-hover:stroke-dracula-red transition-colors'/>
<span className="sr-only">Log out</span>
</button>
);
}

View File

@@ -25,7 +25,11 @@ const content: ExperienceContent[] = [
startDate: "Feb 2023",
tech: "TS/NodeJS/React/DotNet/AWS/K8s/GitOps",
title: "Technical Lead",
content: "As a technical lead, my role moved mostly into communicating with the wider business to ensure my team has clear, achievable objectives, then helping them release those objectives. I've been particularly involved with cross team collaboration to continue pushing improvements to our development process, being part of front end and back end guilds as well as having a constant input into our service architecture to set development wide architectural decisions. During this time I've worked in multiple tech stacks, needing to learn frameworks and languages to a high enough level of competence to help upskill my team within a short amount of time. Some of the projects I've led my team in have been: to rebuild of some of the most used pages, releasing web apps to millions of monthly users which included complexity such as searching, with filters and via a map along with user specific context while still scoring high SEO scores; the creation of a completely new email workflow, utilising multiple email senders, audit trails and handling bounces; and a genetic algorithm built to scale so users can track the progress in crunching complex sets of data."
content: <>
As a technical lead, my role moved mostly into communicating with the wider business to ensure my team has clear, achievable objectives, then helping them release those objectives. I have been particularly involved with cross-team collaboration to continue pushing improvements to our development process, being part of frontend and backend guilds as well as having a constant input into our service architecture to set development-wide architectural decisions. During this time, I have worked in multiple tech stacks, swiftly becoming proficient with frameworks and languages in order to upskill my team.
<br/>
Projects I have led include: rebuilding of some of the most used pages we have, releasing new web apps to millions of monthly users which included complex searching with filters and via a map, and user specific context while retaining high SEO scores and high levels of accessibility compliance; creating a new email workflow, that can utilizes multiple accounts and services to protect reputation; and creating a genetic algorithm built to scale with which users can track their progress when crunching complex sets of data.
</>
},
{
company: "Tes",
@@ -42,7 +46,9 @@ const content: ExperienceContent[] = [
tech: "TS/JS/WebGL/NodeJS",
title: "Development Manager",
content: <>
As development manager, I oversaw all of the developers at Live 5 and had a responsibility to oversee production of over 20 games a year from my teams. I kept each stage of game development on track to meet both internal and external deadlines, able to work with my teams to either change the scope of the project or move developers to get the games back on target. By implementing a proper code review process, frequent stand ups and additional tooling for developers, qa and artists, we produced far more complex games in less time with fewer bugs. <PrintBreak count={3}/> In addition, I mentored both junior and senior members of my team to develop their technical skills, knowledge and soft skills. While managing the team was my foremost responsibility, I was still heavily involved with development. I tackled any particularly difficult coding problems for the team and architecture large-scale changes within the codebase. For example, I integrated new business vital services and rebuilt our base renderer and loading core in TypeScript. One of the more interesting projects I directed was to rebuild our backend, focusing on providing local and remote interfaces to the data generation that allowed for faster development of more reliable game backends. The deployment process was also rebuilt to allow deploying into AWS for browser game access, as a package for a separate serverless game build and to run a statistical analysis on a bare metal local kubernetes cluster which I also administered.
As development manager, I oversaw all of the developers at Live 5 and had a responsibility to oversee production of over 20 games a year from my teams. I kept each stage of game development on track to meet both internal and external deadlines, able to work with my teams to either change the scope of the project or move developers to get the games back on target. <PrintBreak count={4}/> By implementing a proper code review process, frequent stand ups and additional tooling for developers, qa and artists, we produced far more complex games in less time with fewer bugs. In addition, I mentored both junior and senior members of my team to develop their technical skills, knowledge and soft skills.
<br/>
While managing the team was my foremost responsibility, I was still heavily involved with development. I tackled any particularly difficult coding problems for the team and architecture large-scale changes within the codebase. For example, I integrated new business vital services and rebuilt our base renderer and loading core in TypeScript. One of the more interesting projects I directed was to rebuild our backend, focusing on providing local and remote interfaces to the data generation that allowed for faster development of more reliable game backends. The deployment process was also rebuilt to allow deploying into AWS for browser game access, as a package for a separate serverless game build and to run a statistical analysis on a bare metal local kubernetes cluster which I also administered.
</>
},
{
@@ -65,7 +71,7 @@ const content: ExperienceContent[] = [
function Experience({content}: {content: ExperienceContent}): React.JSX.Element {
return (
<div className="flex flex-row gap-4 border-b-dracula-orange border-b last:border-b-0">
<div className="flex flex-row gap-4 dark:border-b-dracula-orange border-b-dracula-bg-light border-b-2 last:border-b-0">
<div className="w-20 justify-center text-center">
{content.endDate}
<br/>
@@ -74,18 +80,18 @@ function Experience({content}: {content: ExperienceContent}): React.JSX.Element
{content.startDate}
</div>
<div className="flex flex-col w-full">
<div className="flex flex-row w-full pb-1 mb-1 border-b-[1px] border-dracula-bg-light">
<div className="text-left">
<div className="flex flex-row w-full pb-1 mb-2 border-b-[1px] border-dracula-bg-light">
<div className="text-left self-start">
{content.title}
</div>
<div className="text-right flex-grow">
<div className="text-right flex-grow self-start">
{content.tech}
</div>
<div className="w-20 ml-3 text-right border-l-[1px] border-dracula-bg-light">
<div className="w-20 ml-3 text-right border-l-[1px] border-dracula-bg-light pr-2">
{content.company}
</div>
</div>
<div className="text-justify pb-2">
<div className="text-justify pb-2 pr-2">
{content.content}
</div>
</div>
@@ -105,14 +111,14 @@ export default function Cv(): React.JSX.Element {
<span className="border-l-[1px] border-dracula-bg-light text-right">joemonk@hotmail.co.uk</span>
</div>
<p className="text-justify">
As a highly motivated and adaptive developer, my enthusiasm for learning new technologies along with years of rapid game and web development has driven my proficiency with many languages and tools, allowing me to be flexible when tackling problems. Over the last few years I have enjoyed expanding my role to include management of multiple large teams, and have picked up new tech stacks while moving between roles.
As a highly motivated and adaptive developer, my enthusiasm for learning new technologies, along with years of rapid game and web development, has driven my proficiency with many languages and tools. This allows me to be flexible when tackling problems. Over the last few years I have enjoyed expanding my role to include management of multiple teams, and have picked up new tech stacks while moving between roles.
</p>
</div>
<div className="bg-dracula-bg-light flex flex-row px-2 py-1 gap-2 text-white">
<PaperAirplaneIcon className="h-5 my-[2px]"/>
<h2 className="font-medium">Experience</h2>
</div>
<div className="flex flex-col gap-4 p-2">
<div className="flex flex-col gap-4 py-2">
{content.map((expContent) => (
<Experience content={expContent} key={`${expContent.company}_${expContent.title}`}/>
))}

View File

@@ -68,7 +68,7 @@ export default function MyLightbox({imageData, children}: {imageData: ImageData[
return (
<div className="mx-auto">
<div className="flex flex-row flex-wrap">
<div className="flex flex-row flex-wrap justify-center">
{children.map((image, index) => (
<button key={`lightbox_img_${index}`} onClick={(() => {
setActive(index);

View File

@@ -0,0 +1,98 @@
'use client';

import { useMemo, useState } from 'react';
import Link from 'next/link';
import { HomeModernIcon, Bars3Icon, XMarkIcon } from '@heroicons/react/24/outline';
import { AnimatePresence, m, LazyMotion, domAnimation } from "framer-motion";
import { usePathname } from 'next/navigation';
import ThemeSwitcher from './theme-switcher';

type NavBarClientProps = {
  LogIn: React.JSX.Element,
  navigation: {
    name: string;
    href: string;
    current: boolean;
  }[]
}

/**
 * Client half of the nav bar. Receives the (possibly auth-dependent) link
 * list and the rendered login control from the server component, marks the
 * link matching the current pathname as active, and handles the mobile
 * open/close animation.
 */
export default function NavBarClient({LogIn, navigation}: NavBarClientProps): React.JSX.Element {
  const [open, setOpen] = useState(false);
  const pathname = usePathname();
  // Clone so the prop is never mutated; flag the entry for the current route.
  const activeNavigation = useMemo((): typeof navigation => {
    const navItems = structuredClone(navigation);
    const current = navItems.find((item) => item.href === pathname);
    if (current) {
      current.current = true;
    }
    return navItems;
    // BUG FIX: `navigation` was missing from the deps, so a changed link list
    // (e.g. "Manage" appearing after login) would render stale.
  }, [pathname, navigation]);
  return (
    <nav className="dark:bg-dracula-bg-darker border-b-2 dark:border-dracula-purple">
      <LazyMotion features={domAnimation}>
        <div className="mx-auto max-w-7xl px-4">
          <div className="relative flex h-16 items-center justify-between">
            <div className="flex">
              {/* Mobile menu toggle (hidden on >=sm). */}
              <button className='sm:hidden dark:hover:bg-dracula-bg-light transition-colors duration-100 rounded-sm p-1' onClick={() => setOpen(!open)}>
                {open ? (
                  <XMarkIcon className='rounded-sm dark:stroke-dracula-cyan h-8 w-auto'/>
                ) : (
                  <Bars3Icon className='rounded-sm dark:stroke-dracula-cyan h-8 w-auto'/>
                )}
                {/* A11y: icon-only button needs a text name, matching the
                    sr-only convention used elsewhere in this codebase. */}
                <span className="sr-only">{open ? "Close menu" : "Open menu"}</span>
              </button>
              <Link className='hidden sm:flex items-center p-1 dark:hover:bg-dracula-bg-light transition-colors' href='/'>
                <HomeModernIcon className='dark:stroke-dracula-cyan rounded-sm h-8 w-auto'/>
              </Link>
              {/* Desktop links. */}
              <div className='space-x-5 hidden sm:flex ml-10'>
                {activeNavigation.map((item) => (
                  <Link
                    key={item.name}
                    href={item.href}
                    className={`dark:hover:bg-dracula-bg-light transition-colors duration-100 dark:text-white rounded-sm px-3 pt-2 pb-1.5 font-normal border-b-2 border-transparent ${
                      item.current ? 'dark:border-b-dracula-pink' : ''
                    }`}
                    aria-current={item.current ? 'page' : undefined}
                  >
                    {item.name}
                  </Link>
                ))}
              </div>
            </div>
            <div className='space-x-4 flex'>
              <ThemeSwitcher/>
              {LogIn}
            </div>
          </div>
        </div>
        {/* Mobile dropdown, height-animated in and out. */}
        <AnimatePresence>
          { open ? (
            <m.div
              initial={{ height: 0 }}
              animate={{ height: "auto" }}
              transition={{ duration: 0.15, ease: 'linear' }}
              exit={{ height: 0 }}
              className='sm:hidden overflow-hidden'
            >
              <div className='flex flex-col space-y-1 py-1'>
                {activeNavigation.map((item) => (
                  <Link
                    key={item.name}
                    href={item.href}
                    className={`dark:hover:bg-dracula-bg-light transition-colors duration-100 dark:text-white px-2 py-2 font-normal border-l-4 border-transparent ${
                      item.current ? 'dark:border-l-dracula-pink' : ''
                    }`}
                    aria-current={item.current ? 'page' : undefined}
                  >
                    {item.name}
                  </Link>
                ))}
              </div>
            </m.div>
          ) : null}
        </AnimatePresence>
      </LazyMotion>
    </nav>
  );
}

View File

@@ -1,96 +1,28 @@
'use client';
import { useMemo, useState } from 'react';
import Link from 'next/link';
import { HomeModernIcon, Bars3Icon, XMarkIcon } from '@heroicons/react/24/outline';
import { AnimatePresence, m, LazyMotion, domAnimation } from "framer-motion";
import ThemeSwitcher from './theme-switcher';
import { usePathname } from 'next/navigation';
"use server";
import { auth } from "@/lib/auth";
import NavBarClient from "./navbar-client";
import LogIn from "./auth/login";
const defaultNavigation = [
{ name: 'Posts', href: '/posts', current: false },
{ name: 'Projects', href: '/projects', current: false },
{ name: 'Photos', href: '/photos', current: false },
{ name: 'CV', href: '/cv', current: false },
{ name: 'Contact', href: '/contact', current: false },
];
export default function NavBar({LogIn}: {LogIn: React.JSX.Element}): React.JSX.Element {
const [open, setOpen] = useState(false);
const pathname = usePathname();
const authedNavigation = [
{ name: 'Manage', href: '/manage', current: false },
]
const navigation = useMemo((): typeof defaultNavigation => {
const nav = structuredClone(defaultNavigation);
const current = nav.find((nav) => nav.href === pathname);
if (current) {
current.current = true;
export default async function NavBar(): Promise<React.JSX.Element> {
const session = await auth();
let nav = structuredClone(defaultNavigation);
if (session?.user) {
nav = nav.concat(structuredClone(authedNavigation));
}
return nav;
}, [pathname]);
return (
<nav className="dark:bg-dracula-bg-darker border-b-2 dark:border-dracula-purple">
<LazyMotion features={domAnimation}>
<div className="mx-auto max-w-7xl px-4">
<div className="relative flex h-16 items-center justify-between">
<div className="flex">
<button className='sm:hidden dark:hover:bg-dracula-bglight transition-colors duration-100 rounded-sm p-1' onClick={() => setOpen(!open)}>
{open ? (
<XMarkIcon className='rounded-sm dark:stroke-dracula-cyan h-8 w-auto'/>
) : (
<Bars3Icon className='rounded-sm dark:stroke-dracula-cyan h-8 w-auto'/>
)}
</button>
<Link className='hidden sm:flex items-center p-1 dark:hover:bg-dracula-bglight transition-colors' href='/'>
<HomeModernIcon className='dark:stroke-dracula-cyan rounded-sm h-8 w-auto'/>
</Link>
<div className='space-x-5 hidden sm:flex ml-10'>
{navigation.map((item) => (
<Link
key={item.name}
href={item.href}
className={`dark:hover:bg-dracula-bglight transition-colors duration-100 dark:text-white rounded-sm px-3 pt-2 pb-1.5 font-normal border-b-2 border-transparent ${
item.current ? 'dark:border-b-dracula-pink' : ''
}`}
aria-current={item.current ? 'page' : undefined}
>
{item.name}
</Link>
))}
</div>
</div>
<div className='space-x-4'>
<ThemeSwitcher/>
{LogIn}
</div>
</div>
</div>
<AnimatePresence>
{ open ? (
<m.div
initial={{ height: 0 }}
animate={{ height: "auto" }}
transition={{ duration: 0.15, ease: 'linear' }}
exit={{ height: 0 }}
className='sm:hidden overflow-hidden'
>
<div className='flex flex-col space-y-1 py-1'>
{navigation.map((item) => (
<Link
key={item.name}
href={item.href}
className={`dark:hover:bg-dracula-bglight transition-colors duration-100 dark:text-white px-2 py-2 font-normal border-l-4 border-transparent ${
item.current ? 'dark:border-l-dracula-pink' : ''
}`}
aria-current={item.current ? 'page' : undefined}
>
{item.name}
</Link>
))}
</div>
</m.div>
) : null}
</AnimatePresence>
</LazyMotion>
</nav>
<NavBarClient LogIn={<LogIn/>} navigation={nav}/>
);
}

View File

@@ -0,0 +1,30 @@
// Shape of the metadata object each MDX post exports.
type PostMetadata = {
  title: string,
  date: string,
  coverImage: string,
  blurb: string,
  shortBlurb: string,
  tags: string[]
}

type PostHeaderProps = {
  metadata: PostMetadata
}

/**
 * Standard header rendered at the top of every post: title, date and a row
 * of tag badges.
 */
export default function PostHeader({metadata}: PostHeaderProps): React.JSX.Element {
  return (
    <>
      <h1>{metadata.title}</h1>
      <div className="mb-2">{metadata.date}</div>
      <div className="mb-6">
        {/* `key` is required for list rendering; tags are assumed unique within a post. */}
        {metadata.tags.map((tag) => (
          <span key={tag} className="select-none text-sm me-2 px-2.5 py-1 rounded border border-dracula-pink dark:bg-dracula-bg-darker dark:text-dracula-pink">{tag}</span>
        ))}
      </div>
    </>
  );
}

View File

@@ -116,7 +116,7 @@ export default function Sim(): React.JSX.Element {
<br />
<input className="text-black" type="text" name="ticketsTotal" />
<br />
<button className="p-2 dark:bg-dracula-bglighter rounded-sm" type="submit">
<button className="p-2 dark:bg-dracula-bg-lighter rounded-sm" type="submit">
Run
</button>
<br />

View File

@@ -15,7 +15,7 @@ export default function ThemeSwitcher(): React.JSX.Element {
return (
<>
<button className="h-8 w-8" onClick={toggleTheme}>
<button className="h-8 w-8 m-1" onClick={toggleTheme}>
<MoonIcon className="dark:hidden block"/>
<SunIcon className="hidden dark:block dark:stroke-dracula-cyan"/>
</button>

32
src/data-source.ts Normal file
View File

@@ -0,0 +1,32 @@
import { DataSource } from "typeorm";
import { Photo } from "./entity/photo";
// Shared TypeORM data source backed by a local SQLite (better-sqlite3) file.
// NOTE(review): `migrations` is a bare directory path with no glob — TODO
// confirm TypeORM actually discovers migration files from "./migrations"
// as written.
const dataSource = new DataSource({
  type: "better-sqlite3",
  database: "db.sql",
  entities: [Photo],
  migrations: ["./migrations"],
})
/**
 * Lazily-initialized singleton wrapper around the TypeORM data source.
 *
 * The *promise* of the initialized source is cached rather than the instance
 * itself: the original cached the instance, so two concurrent first callers
 * could both observe `null` and both call `dataSource.initialize()`, racing
 * to initialize the same DataSource twice. Caching the promise makes all
 * concurrent callers share a single initialize() call.
 */
export default class PhotoDataSource {
  // In-flight or settled initialization; null until first use or after a failure.
  private static _dataSource: Promise<DataSource> | null = null;

  /** Resolves to the initialized data source, initializing it on first use. */
  static get dataSource(): Promise<DataSource> {
    return PhotoDataSource.initDataSource();
  }

  /**
   * Initialize (or reuse) the shared data source.
   *
   * On failure the cached promise is cleared so a later call can retry
   * instead of being stuck with a rejected promise forever.
   */
  static async initDataSource(): Promise<DataSource> {
    if (PhotoDataSource._dataSource === null) {
      PhotoDataSource._dataSource = dataSource
        .initialize()
        .then((ds) => {
          console.log('Photo data source initialized')
          return ds;
        })
        .catch((err: unknown) => {
          // Allow a retry after a failed initialization attempt.
          PhotoDataSource._dataSource = null;
          throw err;
        });
    }
    return PhotoDataSource._dataSource;
  }
}
PhotoDataSource.initDataSource();

48
src/entity/photo.ts Normal file
View File

@@ -0,0 +1,48 @@
import { Column, Entity, PrimaryGeneratedColumn } from "typeorm"
// One row per photo in the gallery database; `src` is the unique lookup key.
@Entity()
export class Photo {
  @PrimaryGeneratedColumn()
  id!: number

  // Image path/URL; unique so re-importing the same file cannot duplicate rows.
  @Column("text", { unique: true })
  src!: string;

  // Intrinsic pixel dimensions (useful for layout before the image loads).
  @Column()
  width!: number

  @Column()
  height!: number

  // Blur placeholder for the image.
  // NOTE(review): column type is "blob" but the field is typed `string` —
  // TODO confirm this holds a base64/data-URL string; otherwise it should
  // probably be `Buffer`.
  @Column("blob")
  blur!: string

  @Column("text", { nullable: true })
  camera: string | null = null;

  // Manually input data
  @Column("text", { nullable: true })
  title: string | null = null;

  @Column("text", { nullable: true })
  description: string | null = null;

  // Exif data
  // NOTE(review): exposure bias is usually fractional EV (e.g. -0.3) — an
  // "int" column may truncate; confirm the intended precision.
  @Column("int", { nullable: true })
  exposureBiasValue: number | null = null

  @Column("float", { nullable: true })
  fNumber: number | null = null

  @Column("int", { nullable: true })
  isoSpeedRatings: number | null = null

  @Column("int", { nullable: true })
  focalLength: number | null = null

  @Column("date", { nullable: true })
  dateTimeOriginal: Date | null = null

  @Column("text", { nullable: true })
  lensModel: string | null = null
}

View File

@@ -1,26 +1,16 @@
import "server-only";
import { getServerSession } from "next-auth";
import CognitoProvider from "next-auth/providers/cognito";
import type { GetServerSidePropsContext, NextApiRequest, NextApiResponse } from "next";
import type { NextAuthOptions, Session } from "next-auth";
import NextAuth from "next-auth";
import { getCurrentUrl } from "./current-url";
export const authConfig = {
secret: process.env.NEXT_AUTH_SECRET,
providers: [
CognitoProvider({
clientId: process.env.NEXT_COGNITO_CLIENT_ID!,
clientSecret: process.env.NEXT_COGNITO_CLIENT_SECRET!,
issuer: process.env.NEXT_COGNITO_ISSUER,
export const { handlers, signIn, signOut, auth } = NextAuth({
providers: [{
id: "authelia",
name: "Authelia",
type: "oidc",
issuer: "https://auth.home.joemonk.co.uk",
clientId: process.env.AUTH_CLIENT_ID,
clientSecret: process.env.AUTH_CLIENT_SECRET,
}],
trustHost: true,
redirectProxyUrl: `${getCurrentUrl()}/api/auth`,
})
],
callbacks: {
redirect(): string {
return '/api/preview';
}
}
} satisfies NextAuthOptions;
// Use it in server contexts
export function auth(...args: [GetServerSidePropsContext["req"], GetServerSidePropsContext["res"]] | [NextApiRequest, NextApiResponse] | []): Promise<Session | null> {
return getServerSession(...args, authConfig);
}

View File

@@ -1,33 +0,0 @@
import { QueryClient, defaultShouldDehydrateQuery } from '@tanstack/react-query';
/**
 * Build a QueryClient with the app-wide defaults: queries stay fresh for a
 * minute, and still-pending queries are included when dehydrating for SSR.
 */
function makeQueryClient(): QueryClient {
  const oneMinuteMs = 60 * 1000;
  return new QueryClient({
    defaultOptions: {
      queries: {
        staleTime: oneMinuteMs,
      },
      dehydrate: {
        // Also dehydrate queries that are still pending so the client can
        // pick them up mid-flight after server rendering.
        shouldDehydrateQuery: (query) => {
          if (defaultShouldDehydrateQuery(query)) {
            return true;
          }
          return query.state.status === 'pending';
        },
      },
    },
  });
}
let browserQueryClient: QueryClient | undefined = undefined;
/**
 * Return the QueryClient appropriate for the current environment.
 *
 * Server: always a fresh client, so no state leaks between requests.
 * Browser: a single shared client, created at most once — if React suspends
 * during the initial render we must not rebuild it (this may not be needed
 * if a suspense boundary sits below the client's creation).
 */
export function getQueryClient(): QueryClient {
  const isServer = typeof window === 'undefined';
  if (isServer) {
    return makeQueryClient();
  }
  browserQueryClient = browserQueryClient ?? makeQueryClient();
  return browserQueryClient;
}

View File

@@ -1 +0,0 @@
# HEADER

View File

@@ -1,13 +1,16 @@
import PostHeader from '@/components/post-header';
export const metadata = {
title: "Being a Developer",
date: "2020-05-12",
path: "/posts/being-a-developer",
coverImage: "../images/being-a-developer/being-a-developer.jpg",
blurb: "My thoughts on being a \"developer\", being a \"programmer\" and the differences between them.",
shortBlurb: "My thoughts on being a developer vs being a programmer.",
tags: ["Blog", "Development"]
}
<PostHeader metadata={metadata} />
So over the last few years, I've had plenty of discussions about "being a developer".
These conversations usually start from people saying "such and such is a good developer", because maybe they created an excellent interface or made a beautifully reusable class. Good code is excellent and people love talking about it. The conversation then usually steers somewhere towards "but they forgot about the product", overran the deadline or missed a visual issue. And I think that defines the difference in my mind between a programmer and a developer. I'd call that person a good programmer.

View File

@@ -0,0 +1,66 @@
import PostHeader from '@/components/post-header';
export const metadata = {
title: "Learning Kubernetes",
date: "2020-12-31",
path: "/posts/learning-Kubernetes",
coverImage: "../images/learning-kubernetes/k8s.png",
blurb: "Learning how to use Kubenetes in an environment between \"Local testing\" and \"Full server deployments\".",
shortBlurb: "Finally getting around to \"learning\" Kubernetes.",
tags: ["Blog", "Development"],
}
<PostHeader metadata={metadata} />
## Moving From Docker into Kubernetes
A few years ago I spent a few days just playing with and reading up on docker, understanding how it fit together properly. I'd used it for a couple of years in the context of [Unraid](https://unraid.net/), and had cobbled together custom virtual networks and weird hacked-together script containers to try get everything to work correctly. The interface of unraid hid most of the complexities, but also the power away. It seemed to work though, and I honestly have no idea how it worked so well for so long. Looking back at it, there's no way it should have.
It then became an excellent solution for building our games (and dealing with some other tools & environments) at work - we had just moved to Gitlab, so wanted CI with the image containing build tools. Which meant I had to learn it properly enough to use in a professional environment. While I'm alright with using some hacky things where needed, when it comes to something as important as live builds I want it done as "properly" as I can.
I feel like I had a good grasp on how docker worked already, so pretty quickly felt comfortable with it. I quickly had a bunch of nice images set up in the work repos, the tooling was all auto building and connecting to AWS. The whole flow was pretty smooth.
One thing I really could not understand however, was the need for Kubernetes in anything smaller than a massive web service or when uptime really matters. For us, a little server running our maths engine that got hit a few hundred times a day for test purposes was just handled by a docker-compose being copied over to an EC2 server and "upped". It recovered itself if a container died (although uptime was over half a year). With a couple of script commands from the CI, it would update happily. There was no need for anything like k8s.
## Starting off with documentation
However at work we then got a bunch of old server machines in a rack from a client. So with that in mind I thought well screw it, I'll go to the effort of learning k8s. The idea of our maths runner being able to connect to a database instead of spitting out a local file and then we running billions of runs over a load of high core machines in a day, saving the data in a centralised place was a pretty nice one. Also I like the challenge of cool little projects like this. Plus the knowledge will then also let us run some gitlab runners to be able to build every commit of our assets, as well as potentially off-load our retail builds.
Finding out exactly what Kubernetes is was a challenge by itself. I understood it as a system that basically spits containers across servers and looks after them (ensures they're up, rolls out updates and keeps the data safe). Effectively a fancy docker compose. However, multiple chats with a friend that works in dev ops really confused me as to what it was for. Saying my understanding wasn't correct and it was an "orchestration system", and getting overcomplicated with things like needing helm and needing to use Google's GKE or Amazon's EKS before trying it out. The overwhelming amount of "needed" knowledge really kept me back for a while. Until I thought well screw it I'll just install it on a couple of servers and see what happens, they're not doing anything and I have the time.
Going straight to the learn Kubernetes pages seemed like a good idea. There's a bunch of interactive tutorials with loads of information. Turns out, they teach you nothing (saying do step 1, 2, 3 and now you know it is a terrible teaching method, there's no why or what you're actually doing - you need to already know that somehow) and there's a massive information overload for someone like me that roughly understands docker and that's about it. I'm not really a network person. As a starter, honestly I just wanted to know how to actually install and connect a couple of machines together and run a container on the system.
Giving up on the Kubernetes documentation as beyond my current ability, I watched a couple of videos that ended up having the same problems - it's all well and good saying here's how to run a webserver locally, but then the next step to be to now deploy it across GKE/EKS just isn't helpful for me, that's not explaining how things connect together. No one seems to care about bare metal anymore. That's the opposite problem that the Kubernetes documentation had.
I ended up finding microk8s, which appeared to have some very simple install instructions, very few, descriptive commands and seemed like a great starting point. So with that, I rebuilt 4 of the server racks with ubuntu server, set one up as a "base" machine, ssh'ed into them all and connected them. Within a few minutes I had the Kubernetes dashboard up with 4 nodes connected. Dunno why everyone makes this stuff so complicated.
## "Hello World" on Kubernetes
I think I went through quite a few different setups with microk8s, testing things out and resetting frequently. I never think this is a bad thing, so long as you learn something each time. My first "app" I decided to set up was a stats server & data store. This meant I could try out something I expected to be pretty easy (jobs), and something I thought would be a little harder (anything with storage - a Stateful Set).
My first step was to actually make the stats engine we have internally output to a database. I booted up an instance of mongodb with docker (my database of choice for this - purely because of simplicity, our data is already json formatted and databases aren't my strongest area), re-wired our output from a file to a better format and threw it at the database. Had it working within the hour, cleaned up and production ready (internal production, so a little looser), fantastic I thought. The engine was already built into a docker image as that's how we already run it, and already had the idea of a "stats job" - do a bunch of runs, stick out the data.
With this ready to be deployed as a job, my next task was to run mongodb on the server, doesn't need to be anything special and it was so easy to create a container locally that just worked I thought this would be a doddle. Turns out it gets complicated very fast. Their documentation was, again, not great unless you're already confident with the tool or using it as a reference. They only seem to recommend using their operator and managing the system through that. Their "how-to" and basics documentation for this just links to a blog post from a few years ago, which makes it particularly hard to follow - after an attempt or two, I this idea got sacked off. I then found the helm charts by [bitnami](https://bitnami.com/), which appeared to be simple, concise and was basically "here's how to run a basic instance, here's the config values and what the do" - perfect. I had it running pretty quickly, with the commands and configs in a shiny new repo. Easy to reproduce if it all collapses, easy to see what the config is, easy to explain to people.
Running the stats job was super easy. Running the job across 4 machines, then pulling the data out of the database all just worked. The networking was easy enough, although I did have some real problems with the hostname lookups, which refused to work, I ended up injecting the hostnames into every instance which then worked. The hardest part of my "first Kubernetes app" by far though was the mongodb setup.
## Free runner minutes
I decided my next task was to setup gitlab runners. With in house runners that would expand as needed, we could happily build every commit of assets. The assets build step takes by far the most amount of time in our game builds. However, towards the end of a project especially, there just aren't that many commits to our assets repos. If the assets are pre-built, our game builds can take 1/4 of the time total. If game builds are quick and easy, developers aren't waiting around as long to ensure their build works and the whole flow comes together much better.
Connecting the instance to GitLab was easy enough, setting up the keys was fine, basically just following their how to. The bit that tripped me up next was their "one click" k8s applications. There's a big old install button next to all sorts of things you might want to install like the gitlab runners, Prometheus and a bunch of other fun applications I don't really understand (yet). Of course what they don't tell you on that page is that those buttons "install" these applications with a default helm config, but that config doesn't really work, or is full of deprecated options. Cool. So after fiddling with that a bunch, I gave that up too and set up the "Cluster Management Project". This was basically just a super nice little config repo that GitLab hooked into. Fiddle with the config, add a flag, commit and you get a fully setup and installed application that links to GitLab.
Those runners then just hooked up easily, and would pick up jobs fine. It would however fail every job due to having no cache so - onto caching.
### Minio caching
GitLab runners support a few types of caching, from Google cloud something storage, AWS S3 buckets, I think there's an Azure one and local doesn't make sense (need a central source for multiple machines). The best way to have it "locally" seemed to be to setup a Minio server, which basically emulates AWS's S3 - perfect, we already use S3 and I've had a fair amount of AWS exposure at this point, so using that API seemed great.
Setting up Minio was awful. For k8s they have an operator so you *should* just have to install their operator (through yet another third party installer - krew, which installs kubectl plugins), then run the commands through that and it *should* "just work". Ok, so after setting up krew and getting the minio operator setup, I went through their docs and setup the config as I thought we needed. It wasn't quite right, so rightfully failed to launch correctly - that's fair, my fault. But then I had to go through and manually uninstall EVERY step the operator had taken or the operator would refuse to install again. After quite a few attempts I ended up with the correct set of commands and setup the operator into the namespace I wanted, with the storage I wanted across multiple machines.
The operator however installed an old version of the actual MinIO server. Which meant it couldn't actually be used (can't remember exactly why, but the old version was super out of date and couldn't pick up part of the k8s config). Fine, I can change deployments and replica sets and all that manually, I'll just do that, set the image and be done with it. The operator changed it back. So at this point I was fighting a system that would detect my changes and undo them. I tried everything I could think of, to no avail. It was an open and known issue in GitHub though, with the reply being "we'll sort it next release, coming soon" posted over a month beforehand. I ended up basically just starting an instance, waiting for it to try update itself then restart the force stopped the old version. k8s then wouldn't close the new server because the old one had been removed. It was a hacky workaround that was pretty much just tricking k8s into keeping the one I wanted alive, and definitely wasn't reproducable. Should I have left that running? No, definitely not. Did I care at this point for a small part of a system that was internal to dev only, and didn't matter that much if it failed? No.
So MinIO sort of worked at this point, at least I could access it through the web ui and set up a bucket. The way it's set up means it doesn't authenticate its SSL certs with the k8s authority, so it didn't work with the gitlab runners. After following the recommended fixes (copying the certs to a shared space, reauthing the certs etc), it still refused to work correctly, but the host could be seen in the runner even if it didn't work, which meant the runner would look it up, fail to execute the caching but it would continue the job. Not great, but it'll do for now, and I'll come back when the tools are more mature.
## Final Thoughts
At this point I had everything I wanted to run running, and didn't want to spend more time on this. I think overall my final thoughts are that Kubernetes is worth learning. Taken in small steps with a clear, small project in mind and limiting the scope of what you need to do at any one time, it can be easy to get to grips with and is worth the effort. The core concepts are relatively simple, it's the tooling and everything built on top of it that adds the complexity.

View File

@@ -0,0 +1,49 @@
import PostHeader from '@/components/post-header';
export const metadata = {
title: "Managing a Team Remotely",
date: "2020-10-05",
coverImage: "../images/managing-a-team-remotely/managing-a-team-remotely.jpg",
blurb: "With working remotely being a necessity at the moment, my thoughts on managing a team of developers with no physicality.",
shortBlurb: "My thoughts managing a team of developers with no physicality.",
tags: ["Blog", "Development"]
}
<PostHeader metadata={metadata} />
I originally had the idea for this post months ago, when we were really starting to get into the swing of working full time remotely. I have however put off writing on here since, so this never got very far. As an attempt to maybe write a bit more, I thought I'd pick up this idea again and contrast with how my thoughts on working remotely were a few months ago, to how I feel about it now.
## The Beginning
We'd always offered working for remotely for a couple of days a week to everyone past their probation, but most people didn't take up on it very often. And frequently when they did, they didn't set up their environment correctly. They knew they would only be working from home for a short period of time (mostly taking larger assets on memory sticks, maybe not setting up their environment as much as they would like etc). It was obvious we were going to have to work from home full time at some point.
My team almost exclusively worked online anyway, using services like Gitlab and AWS. However the other development team and the art team also needed to be able to work remotely, with most of their work being on an internal server. Plans had been in place to move the other development team to use the same setup that we did, but they kept being put off.
We had to make sure everyone had a machine they would be comfortable working on and they had a area set aside fit for working at. I also needed to make sure people had tested they could actually access the VPN and the internal file share for passing larger assets around. Most people said they were happy with their home set up, although we allowed one or two people to take their work PCs home. I think someone took their chair as well, which I think is a sensible move if your chair will cause problems for your back.
## A few months in
I expected us to take a little hit in productivity at the beginning and slowly regain it. To end up stuck around maybe 90-95% productive compared to being in the office.
It's always hard to tell exactly how effectively people are working, but to be honest it felt like people were actually working harder at first. Maybe to prove they weren't just slacking off at home - I'd seen it before when people worked from home for other reasons, but I didn't think it'd extend out to *having* to work from home. So what I actually saw was maybe a bit more productivity from my team. Before the lack of face to face meetings, little chats and being generally always around each other meant things started to get lost. This was mostly as new games started up, discussing the specs was much harder, and with each new build of the games our QA might miss a few of the extra things we'd done. To combat this we've pushed to make sure everything is logged, right from the beginning of the game.
## 8 months in
Last month we could start thinking about what we were going to do to maybe get people back in. Having given it a fair amount of thought, I actually don't think it's worth my team being back in the office. All of our work is on remote servers, people have their work machine and their phones, so they can develop and we can test our game on at least 2 devices. There doesn't seem to be a big barrier in our communication either, frequently having calls and staying in touch on Slack pretty much all day.
I checked with the team to see if they were comfortable coming into the office, and everyone came in for our standard monthly development meeting. We did these every month anyway but remotely we were definitely missing the point of them. Having them back in the office feels great we can clear up any gripes anyone seems to have. It also gives me time to talk through any problems or HR related things my team had. It was super nice to see everyone again and the conversations were much easier and more full.
We also have our game kick off meetings in the office, with as many of the design team & art team as we can. This is to go through the smaller details before starting the game, which helps out with missing the day to day face to face contact with the art team. I believe it'll also be worth getting QA in on the kick off meetings just to help out with his idea of how things should be.
## Working From Home Going Forwards
With frequent in-office meetings with the development team, and the devs still being comfortable to ping the art & design team as often as needed, I don't see working from home as a standard being a problem going forwards.
I do however think our QA will continue to struggle with not having the face to face conversations to be able to ask how things should act, but hopefully with some more training, confidence and experience the gap should close there.
The art and design team will need to fill in more game details earlier and the dev team will need to perhaps fill in more gaps with their own creativity than before, but again I feel with more experience with remote working, this problem will go away.
Our art team have also had a new starter since lockdown started, only meeting most of the team face to face for the first time a few months after starting. Them being able to do this comfortably makes me feel like it won't be too much of a problem to roll any new starters into our team and get them up to date.
One massive benefit that I think we'll gain is that suddenly, when we can travel again, it makes visiting family much easier for my developers from Spain and Portugal. There's no longer a need to take the whole visit as annual holiday and maybe only really get a few days with the family, instead maybe taking a week for a 2 week visit, and fitting in the work around when their families are busy. The flexibility in our working times along with not needing to be in the office makes this completely not a problem.
Overall I think it'll be an interesting and not necessarily a bad change for my team to work remotely full time, and my worries about it being more difficult to manage have mostly been cleared up.

1
src/middleware.ts Normal file
View File

@@ -0,0 +1 @@
export { auth as middleware } from "@/lib/auth";

View File

@@ -1,3 +0,0 @@
// Echo worker: replies to each incoming message with the original payload
// plus a `received` acknowledgement flag.
addEventListener('message', function (event) {
  const reply = { received: true, ...event.data };
  postMessage(reply);
});

View File

@@ -33,12 +33,24 @@ const config: Config = {
'red': '#FF5555',
'yellow': '#F1FA8C',
}
},
typography: () => ({
DEFAULT: {
css: {
h1: {
"margin-bottom": "0.4444em", // Default is (32/36)em, this is (16/36)em
},
h2: {
"margin-bottom": "0.4em", // Default is (16/20)em, this is (12/20)em
}
}
}
})
}
},
plugins: [
require('@tailwindcss/typography'),
require('tailwind-scrollbar')({ nocompatible: true })
require('tailwind-scrollbar')
],
};
export default config;

View File

@@ -6,6 +6,8 @@
"dom.iterable",
"esnext"
],
"emitDecoratorMetadata": true,
"experimentalDecorators": true,
"allowJs": true,
"skipLibCheck": true,
"strict": true,