remove huly because it's terrible

2025-03-21 21:44:09 +01:00
parent 1e8af651a1
commit 12f7343c3b
34 changed files with 0 additions and 1047 deletions


@@ -1,97 +0,0 @@
# Quick Start with Kind
> [!NOTE]
> kind does not require kubectl, but you will not be able to perform some of the steps below without it. To install kubectl, see the upstream kubectl installation docs.
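For example, one common way to install kubectl on Linux x86_64 (following the upstream installation docs; adjust the URL for your platform) is:
```bash
# Download the latest stable kubectl release and put it on the PATH
curl -LO "https://dl.k8s.io/release/$(curl -L -s https://dl.k8s.io/release/stable.txt)/bin/linux/amd64/kubectl"
chmod +x kubectl
sudo mv kubectl /usr/local/bin/kubectl
```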
## Install kind
**macOS:**
```bash
# For Intel Macs
[ $(uname -m) = x86_64 ] && curl -Lo ./kind https://kind.sigs.k8s.io/dl/v0.26.0/kind-darwin-amd64
# For M1 / ARM Macs
[ $(uname -m) = arm64 ] && curl -Lo ./kind https://kind.sigs.k8s.io/dl/v0.26.0/kind-darwin-arm64
chmod +x ./kind
mv ./kind /some-dir-in-your-PATH/kind
```
**Linux:**
```bash
# For AMD64 / x86_64
[ $(uname -m) = x86_64 ] && curl -Lo ./kind https://kind.sigs.k8s.io/dl/v0.26.0/kind-linux-amd64
# For ARM64
[ $(uname -m) = aarch64 ] && curl -Lo ./kind https://kind.sigs.k8s.io/dl/v0.26.0/kind-linux-arm64
chmod +x ./kind
sudo mv ./kind /usr/local/bin/kind
```
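Either way, you can verify that the binary is installed and on your `PATH` by checking the reported version:
```bash
kind version
```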
## Set up a cluster with port forwarding
> [!NOTE]
> Ports `80` and `443` must be available on the host; the cluster config below maps them to the ingress controller.
```bash
cat <<EOF | kind create cluster --config=-
kind: Cluster
apiVersion: kind.x-k8s.io/v1alpha4
nodes:
- role: control-plane
  extraPortMappings:
  - containerPort: 80
    hostPort: 80
    protocol: TCP
  - containerPort: 443
    hostPort: 443
    protocol: TCP
EOF
```
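Once the cluster is created, confirm that it is reachable (kind names the cluster `kind` by default, so the kubectl context is `kind-kind`):
```bash
kind get clusters
kubectl cluster-info --context kind-kind
kubectl get nodes
```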
Deploy the ingress-nginx controller:
```bash
kubectl apply -f https://kind.sigs.k8s.io/examples/ingress/deploy-ingress-nginx.yaml
```
Wait for the nginx controller to be ready:
```bash
kubectl wait --namespace ingress-nginx \
--for=condition=ready pod \
--selector=app.kubernetes.io/component=controller \
--timeout=90s
```
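Optionally, confirm that the controller pod is running and that the host port mapping works; with no Huly ingresses deployed yet, nginx should answer requests on `localhost` with a 404 from its default backend:
```bash
kubectl get pods --namespace ingress-nginx
# Expect an HTTP 404 response from the ingress controller
curl -i http://localhost/
```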
Add host entries so that the ingress hostnames resolve from the host machine:
```bash
sudo cp -p /etc/hosts /tmp/hosts
echo "127.0.0.1 huly.example" | sudo tee -a /etc/hosts
echo "127.0.0.1 account.huly.example" | sudo tee -a /etc/hosts
```
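The sample manifests place all resources in the `huly` namespace; if nothing in your checkout creates that namespace, create it before applying:
```bash
kubectl create namespace huly
```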
Deploy Huly with `kubectl`:
```bash
kubectl apply -R -f .
```
Wait until the front app comes up:
```bash
kubectl wait --namespace huly --for=condition=Available deployment/front --timeout=3m
```
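You can also watch the individual pods while the services start (assuming the `huly` namespace used by the sample manifests):
```bash
kubectl get pods --namespace huly
```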
Now, launch your web browser and [enjoy Huly](http://huly.example)!
## Cleanup
Restore hosts file:
```bash
sudo mv /tmp/hosts /etc/hosts
```
Cleanup Huly:
```bash
kubectl delete -R -f .
```
Delete kind cluster:
```bash
kind delete cluster
```


@@ -1,24 +0,0 @@
# Huly Kubernetes Deployment
This folder contains a sample configuration for a Huly Kubernetes deployment.
## Requirements
Requires a working Kubernetes cluster with at least one node. Each node should have at least 2 vCPUs and 4 GB of RAM.
If you don't have a Kubernetes cluster, consider using the [kind setup](QUICKSTART.md).
## Check and update configuration
The Huly deployment configuration is located in the [config.yaml](config/config.yaml) and [secret.yaml](config/secret.yaml) files.
The sample configuration assumes that Huly is available at the huly.example hostname, with a dedicated hostname per service.
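At a minimum, replace the sample `SERVER_SECRET` in [secret.yaml](config/secret.yaml) with a fresh random value before deploying; for example, assuming `openssl` is available:
```bash
# Generate a random value to paste into SERVER_SECRET in config/secret.yaml
openssl rand -hex 32
```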
## Deploy Huly to Kubernetes
Deploy Huly with `kubectl`.
```bash
kubectl apply -R -f .
```
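To verify that the deployments become available (again assuming the `huly` namespace used by the sample manifests):
```bash
kubectl get deployments --namespace huly
kubectl wait --namespace huly --for=condition=Available deployment --all --timeout=3m
```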
Now, launch your web browser and enjoy Huly!


@@ -1,68 +0,0 @@
apiVersion: apps/v1
kind: Deployment
metadata:
namespace: huly
labels:
app: account
name: account
spec:
replicas: 1
selector:
matchLabels:
app: account
template:
metadata:
labels:
app: account
spec:
containers:
- env:
- name: ACCOUNTS_URL
valueFrom:
configMapKeyRef:
name: huly-config
key: ACCOUNTS_URL
- name: ACCOUNT_PORT
value: '3000'
- name: FRONT_URL
valueFrom:
configMapKeyRef:
name: huly-config
key: FRONT_URL
- name: STATS_URL
value: http://stats
- name: STORAGE_CONFIG
valueFrom:
secretKeyRef:
name: huly-secret
key: STORAGE_CONFIG
- name: MODEL_ENABLED
value: '*'
- name: DB_URL
valueFrom:
configMapKeyRef:
name: huly-config
key: MONGO_URL
- name: MONGO_URL
valueFrom:
configMapKeyRef:
name: huly-config
key: MONGO_URL
- name: SERVER_SECRET
valueFrom:
secretKeyRef:
name: huly-secret
key: SERVER_SECRET
- name: TRANSACTOR_URL
valueFrom:
configMapKeyRef:
name: huly-config
key: TRANSACTOR_URL
image: hardcoreeng/account:v0.6.471
name: account
ports:
- containerPort: 3000
resources:
limits:
memory: "512M"
restartPolicy: Always


@@ -1,23 +0,0 @@
apiVersion: networking.k8s.io/v1
kind: Ingress
metadata:
namespace: huly
annotations:
kubernetes.io/ingress.class: nginx
cert-manager.io/cluster-issuer: "letsencrypt-prod"
labels:
app: account
name: account
spec:
ingressClassName: nginx
rules:
- host: account.huly.panic.haus
http:
paths:
- backend:
service:
name: account
port:
number: 80
path: /
pathType: Prefix


@@ -1,13 +0,0 @@
apiVersion: v1
kind: Service
metadata:
namespace: huly
labels:
app: account
name: account
spec:
ports:
- port: 80
targetPort: 3000
selector:
app: account


@@ -1,43 +0,0 @@
apiVersion: apps/v1
kind: Deployment
metadata:
namespace: huly
labels:
app: collaborator
name: collaborator
spec:
replicas: 1
selector:
matchLabels:
app: collaborator
template:
metadata:
labels:
app: collaborator
spec:
containers:
- env:
- name: ACCOUNTS_URL
value: http://account
- name: STATS_URL
value: http://stats
- name: COLLABORATOR_PORT
value: "3078"
- name: STORAGE_CONFIG
valueFrom:
secretKeyRef:
name: huly-secret
key: STORAGE_CONFIG
- name: SECRET
valueFrom:
secretKeyRef:
name: huly-secret
key: SERVER_SECRET
image: hardcoreeng/collaborator:v0.6.471
name: collaborator
ports:
- containerPort: 3078
resources:
limits:
memory: '512M'
restartPolicy: Always


@@ -1,23 +0,0 @@
apiVersion: networking.k8s.io/v1
kind: Ingress
metadata:
namespace: huly
annotations:
kubernetes.io/ingress.class: nginx
cert-manager.io/cluster-issuer: "letsencrypt-prod"
labels:
app: collaborator
name: collaborator
spec:
ingressClassName: nginx
rules:
- host: collaborator.huly.panic.haus
http:
paths:
- backend:
service:
name: collaborator
port:
number: 80
path: /
pathType: Prefix


@@ -1,13 +0,0 @@
apiVersion: v1
kind: Service
metadata:
namespace: huly
labels:
app: collaborator
name: collaborator
spec:
ports:
- port: 80
targetPort: 3078
selector:
app: collaborator


@@ -1,16 +0,0 @@
apiVersion: v1
kind: ConfigMap
metadata:
namespace: huly
name: huly-config
data:
ACCOUNTS_URL: 'http://account.huly.panic.haus/'
COLLABORATOR_URL: 'ws://collaborator.huly.panic.haus/'
FRONT_URL: 'http://track.panic.haus'
REKONI_URL: 'http://rekoni.huly.panic.haus/'
STATS_URL: 'http://stats.huly.panic.haus/'
TRANSACTOR_URL: 'ws://transactor;ws://transactor.huly.panic.haus/'
MINIO_ENDPOINT: 'minio'
MONGO_URL: 'mongodb://mongodb:27017'
ELASTIC_URL: 'http://elastic:9200'
ELASTIC_INDEX_NAME: 'huly_storage_index'


@@ -1,9 +0,0 @@
apiVersion: v1
kind: Secret
metadata:
namespace: huly
name: huly-secret
type: Opaque
stringData:
# Sample placeholder values; replace both before deploying to a real cluster
SERVER_SECRET: jm83948hg9irfi4f03igjg203id1ahc9z
STORAGE_CONFIG: minio|minio?accessKey=minioadmin&secretKey=minioadmin


@@ -1,68 +0,0 @@
apiVersion: apps/v1
kind: Deployment
metadata:
namespace: huly
labels:
app: elastic
name: elastic
spec:
replicas: 1
selector:
matchLabels:
app: elastic
strategy:
type: Recreate
template:
metadata:
labels:
app: elastic
spec:
securityContext:
runAsUser: 1000
runAsGroup: 1000
fsGroup: 1000
containers:
- args:
- /bin/sh
- -c
- |-
chown -R 1000:1000 /usr/share/elasticsearch/data;
apt-get update && apt-get install -y curl;
./bin/elasticsearch-plugin list | grep -q ingest-attachment || yes | ./bin/elasticsearch-plugin install --silent ingest-attachment;
/usr/local/bin/docker-entrypoint.sh eswrapper
env:
- name: BITNAMI_DEBUG
value: "true"
- name: ELASTICSEARCH_PORT_NUMBER
value: "9200"
- name: ES_JAVA_OPTS
value: -Xms1024m -Xmx1024m
- name: discovery.type
value: single-node
- name: http.cors.allow-origin
value: http://localhost:8082
- name: http.cors.enabled
value: "true"
image: elasticsearch:7.14.2
livenessProbe:
exec:
command:
- /bin/sh
- -c
- curl -s http://localhost:9200/_cluster/health | grep -vq '"status":"red"'
initialDelaySeconds: 60
periodSeconds: 20
failureThreshold: 10
name: elastic
ports:
- containerPort: 9200
hostPort: 9200
protocol: TCP
volumeMounts:
- mountPath: /usr/share/elasticsearch/data
name: elastic
restartPolicy: Always
volumes:
- name: elastic
persistentVolumeClaim:
claimName: elastic


@@ -1,14 +0,0 @@
apiVersion: v1
kind: PersistentVolumeClaim
metadata:
namespace: huly
labels:
app: elastic
name: elastic
spec:
storageClassName: longhorn
accessModes:
- ReadWriteOnce
resources:
requests:
storage: 10Gi


@@ -1,15 +0,0 @@
apiVersion: v1
kind: Service
metadata:
namespace: huly
labels:
app: elastic
name: elastic
spec:
ports:
- name: "9200"
port: 9200
targetPort: 9200
selector:
app: elastic


@@ -1,77 +0,0 @@
apiVersion: apps/v1
kind: Deployment
metadata:
namespace: huly
labels:
app: front
name: front
spec:
replicas: 1
selector:
matchLabels:
app: front
template:
metadata:
labels:
app: front
spec:
containers:
- env:
- name: ACCOUNTS_URL
valueFrom:
configMapKeyRef:
name: huly-config
key: ACCOUNTS_URL
- name: CALENDAR_URL
value: http://calendar
- name: COLLABORATOR_URL
valueFrom:
configMapKeyRef:
name: huly-config
key: COLLABORATOR_URL
- name: DEFAULT_LANGUAGE
value: en
- name: ELASTIC_URL
valueFrom:
configMapKeyRef:
name: huly-config
key: ELASTIC_URL
- name: GMAIL_URL
value: http://gmail:8088
- name: STORAGE_CONFIG
valueFrom:
secretKeyRef:
name: huly-secret
key: STORAGE_CONFIG
- name: MONGO_URL
valueFrom:
configMapKeyRef:
name: huly-config
key: MONGO_URL
- name: REKONI_URL
valueFrom:
configMapKeyRef:
name: huly-config
key: REKONI_URL
- name: SERVER_PORT
value: "8080"
- name: SERVER_SECRET
valueFrom:
secretKeyRef:
name: huly-secret
key: SERVER_SECRET
- name: TELEGRAM_URL
value: http://telegram:8086
- name: TITLE
value: Huly Self Hosted
- name: UPLOAD_URL
value: /files
- name: STATS_URL
value: http://stats
- name: DESKTOP_UPDATES_CHANNEL
value: selfhost
image: hardcoreeng/front:v0.6.471
name: front
ports:
- containerPort: 8080
restartPolicy: Always


@@ -1,23 +0,0 @@
apiVersion: networking.k8s.io/v1
kind: Ingress
metadata:
namespace: huly
annotations:
kubernetes.io/ingress.class: nginx
cert-manager.io/cluster-issuer: "letsencrypt-prod"
labels:
app: front
name: front
spec:
ingressClassName: nginx
rules:
- host: track.panic.haus
http:
paths:
- backend:
service:
name: front
port:
number: 80
path: /
pathType: Prefix


@@ -1,13 +0,0 @@
apiVersion: v1
kind: Service
metadata:
namespace: huly
labels:
app: front
name: front
spec:
ports:
- port: 80
targetPort: 8080
selector:
app: front


@@ -1,60 +0,0 @@
apiVersion: apps/v1
kind: Deployment
metadata:
namespace: huly
labels:
app: fulltext
name: fulltext
spec:
replicas: 1
selector:
matchLabels:
app: fulltext
template:
metadata:
labels:
app: fulltext
spec:
containers:
- env:
- name: SERVER_SECRET
valueFrom:
secretKeyRef:
name: huly-secret
key: SERVER_SECRET
- name: DB_URL
valueFrom:
configMapKeyRef:
name: huly-config
key: MONGO_URL
- name: FULLTEXT_DB_URL
valueFrom:
configMapKeyRef:
name: huly-config
key: ELASTIC_URL
- name: ELASTIC_INDEX_NAME
valueFrom:
configMapKeyRef:
name: huly-config
key: ELASTIC_INDEX_NAME
- name: STORAGE_CONFIG
valueFrom:
secretKeyRef:
name: huly-secret
key: STORAGE_CONFIG
- name: REKONI_URL
value: http://rekoni
- name: ACCOUNTS_URL
value: http://account
- name: STATS_URL
value: http://stats
image: hardcoreeng/fulltext:v0.6.471
name: fulltext
ports:
- containerPort: 4700
hostPort: 4700
protocol: TCP
resources:
limits:
memory: "512M"
restartPolicy: Always


@@ -1,13 +0,0 @@
apiVersion: v1
kind: Service
metadata:
namespace: huly
labels:
app: fulltext
name: fulltext
spec:
ports:
- port: 80
targetPort: 4700
selector:
app: fulltext


@@ -1,14 +0,0 @@
apiVersion: v1
kind: PersistentVolumeClaim
metadata:
namespace: huly
labels:
app: files
name: files
spec:
storageClassName: longhorn
accessModes:
- ReadWriteOnce
resources:
requests:
storage: 50Gi


@@ -1,44 +0,0 @@
apiVersion: apps/v1
kind: Deployment
metadata:
namespace: huly
labels:
app: minio
name: minio
spec:
replicas: 1
selector:
matchLabels:
app: minio
strategy:
type: Recreate
template:
metadata:
labels:
app: minio
spec:
containers:
- args:
- server
- /data
- --address
- :9000
- --console-address
- :9001
image: minio/minio
name: minio
ports:
- containerPort: 9000
hostPort: 9000
protocol: TCP
- containerPort: 9001
hostPort: 9001
protocol: TCP
volumeMounts:
- mountPath: /data
name: files
restartPolicy: Always
volumes:
- name: files
persistentVolumeClaim:
claimName: files


@@ -1,17 +0,0 @@
apiVersion: v1
kind: Service
metadata:
namespace: huly
labels:
app: minio
name: minio
spec:
ports:
- name: "9000"
port: 9000
targetPort: 9000
- name: "9001"
port: 9001
targetPort: 9001
selector:
app: minio


@@ -1,38 +0,0 @@
apiVersion: apps/v1
kind: Deployment
metadata:
namespace: huly
labels:
app: mongodb
name: mongodb
spec:
replicas: 1
selector:
matchLabels:
app: mongodb
strategy:
type: Recreate
template:
metadata:
labels:
app: mongodb
spec:
containers:
- env:
- name: PGID
value: "1000"
- name: PUID
value: "1000"
image: mongo:7-jammy
name: mongodb
ports:
- containerPort: 27017
protocol: TCP
volumeMounts:
- mountPath: /data/db
name: db
restartPolicy: Always
volumes:
- name: db
persistentVolumeClaim:
claimName: db


@@ -1,12 +0,0 @@
apiVersion: v1
kind: PersistentVolumeClaim
metadata:
namespace: huly
name: db
spec:
storageClassName: longhorn
accessModes:
- ReadWriteOnce
resources:
requests:
storage: 10Gi


@@ -1,14 +0,0 @@
apiVersion: v1
kind: Service
metadata:
namespace: huly
labels:
app: mongodb
name: mongodb
spec:
ports:
- port: 27017
targetPort: 27017
protocol: TCP
selector:
app: mongodb


@@ -1,34 +0,0 @@
apiVersion: apps/v1
kind: Deployment
metadata:
namespace: huly
labels:
app: rekoni
name: rekoni
spec:
replicas: 1
selector:
matchLabels:
app: rekoni
template:
metadata:
labels:
app: rekoni
spec:
containers:
- image: hardcoreeng/rekoni-service:v0.6.471
name: rekoni
env:
- name: SECRET
valueFrom:
secretKeyRef:
name: huly-secret
key: SERVER_SECRET
ports:
- containerPort: 4004
hostPort: 4004
protocol: TCP
resources:
limits:
memory: "500M"
restartPolicy: Always


@@ -1,23 +0,0 @@
apiVersion: networking.k8s.io/v1
kind: Ingress
metadata:
namespace: huly
annotations:
kubernetes.io/ingress.class: nginx
cert-manager.io/cluster-issuer: "letsencrypt-prod"
labels:
app: rekoni
name: rekoni
spec:
ingressClassName: nginx
rules:
- host: rekoni.huly.panic.haus
http:
paths:
- backend:
service:
name: rekoni
port:
number: 80
path: /
pathType: Prefix


@@ -1,13 +0,0 @@
apiVersion: v1
kind: Service
metadata:
namespace: huly
labels:
app: rekoni
name: rekoni
spec:
ports:
- port: 80
targetPort: 4004
selector:
app: rekoni


@@ -1,36 +0,0 @@
apiVersion: apps/v1
kind: Deployment
metadata:
namespace: huly
labels:
app: stats
name: stats
spec:
replicas: 1
selector:
matchLabels:
app: stats
template:
metadata:
labels:
app: stats
spec:
containers:
- image: hardcoreeng/stats:v0.6.471
name: stats
env:
- name: PORT
value: "4900"
- name: SERVER_SECRET
valueFrom:
secretKeyRef:
name: huly-secret
key: SERVER_SECRET
ports:
- containerPort: 4900
hostPort: 4900
protocol: TCP
resources:
limits:
memory: "500M"
restartPolicy: Always


@@ -1,23 +0,0 @@
apiVersion: networking.k8s.io/v1
kind: Ingress
metadata:
namespace: huly
annotations:
kubernetes.io/ingress.class: nginx
cert-manager.io/cluster-issuer: "letsencrypt-prod"
labels:
app: stats
name: stats
spec:
ingressClassName: nginx
rules:
- host: stats.huly.panic.haus
http:
paths:
- backend:
service:
name: stats
port:
number: 80
path: /
pathType: Prefix


@@ -1,13 +0,0 @@
apiVersion: v1
kind: Service
metadata:
namespace: huly
labels:
app: stats
name: stats
spec:
ports:
- port: 80
targetPort: 4900
selector:
app: stats


@@ -1,61 +0,0 @@
apiVersion: apps/v1
kind: Deployment
metadata:
namespace: huly
labels:
app: transactor
name: transactor
spec:
replicas: 1
selector:
matchLabels:
app: transactor
template:
metadata:
labels:
app: transactor
spec:
containers:
- env:
- name: ACCOUNTS_URL
value: http://account
- name: FULLTEXT_URL
value: http://fulltext
- name: FRONT_URL
valueFrom:
configMapKeyRef:
name: huly-config
key: FRONT_URL
- name: STATS_URL
value: http://stats
- name: STORAGE_CONFIG
valueFrom:
secretKeyRef:
name: huly-secret
key: STORAGE_CONFIG
- name: MONGO_URL
valueFrom:
configMapKeyRef:
name: huly-config
key: MONGO_URL
- name: DB_URL
valueFrom:
configMapKeyRef:
name: huly-config
key: MONGO_URL
- name: SERVER_CURSOR_MAXTIMEMS
value: "30000"
- name: SERVER_PORT
value: "3333"
- name: SERVER_SECRET
valueFrom:
secretKeyRef:
name: huly-secret
key: SERVER_SECRET
image: hardcoreeng/transactor:v0.6.471
name: transactor
ports:
- containerPort: 3333
hostPort: 3333
protocol: TCP
restartPolicy: Always


@@ -1,23 +0,0 @@
apiVersion: networking.k8s.io/v1
kind: Ingress
metadata:
namespace: huly
annotations:
kubernetes.io/ingress.class: nginx
cert-manager.io/cluster-issuer: "letsencrypt-prod"
labels:
app: transactor
name: transactor
spec:
ingressClassName: nginx
rules:
- host: transactor.huly.panic.haus
http:
paths:
- backend:
service:
name: transactor
port:
number: 80
path: /
pathType: Prefix


@@ -1,14 +0,0 @@
apiVersion: v1
kind: Service
metadata:
namespace: huly
labels:
app: transactor
name: transactor
spec:
ports:
- port: 80
protocol: TCP
targetPort: 3333
selector:
app: transactor


@@ -1,56 +0,0 @@
apiVersion: apps/v1
kind: Deployment
metadata:
namespace: huly
labels:
app: workspace
name: workspace
spec:
replicas: 1
selector:
matchLabels:
app: workspace
template:
metadata:
labels:
app: workspace
spec:
containers:
- env:
- name: ACCOUNTS_URL
value: http://account
- name: STATS_URL
value: http://stats
- name: TRANSACTOR_URL
valueFrom:
configMapKeyRef:
name: huly-config
key: TRANSACTOR_URL
- name: STORAGE_CONFIG
valueFrom:
secretKeyRef:
name: huly-secret
key: STORAGE_CONFIG
- name: MODEL_ENABLED
value: '*'
- name: DB_URL
valueFrom:
configMapKeyRef:
name: huly-config
key: MONGO_URL
- name: MONGO_URL
valueFrom:
configMapKeyRef:
name: huly-config
key: MONGO_URL
- name: SERVER_SECRET
valueFrom:
secretKeyRef:
name: huly-secret
key: SERVER_SECRET
image: hardcoreeng/workspace:v0.6.471
name: workspace
resources:
limits:
memory: "512M"
restartPolicy: Always