diff --git a/Kubernetes/modernizer/modernizer-backend-deployment.yml b/Kubernetes/modernizer/modernizer-backend-deployment.yml
index e106279..f30f72b 100644
--- a/Kubernetes/modernizer/modernizer-backend-deployment.yml
+++ b/Kubernetes/modernizer/modernizer-backend-deployment.yml
@@ -21,6 +21,8 @@ spec:
           env:
             - name: OLLAMA_URL
               value: "http://ollama-service.ba-kovacevic:11434"
+            - name: WEBUI_URL
+              value: "http://ollama-webui-service.ba-kovacevic:8080"
             - name: WEAVIATE_HOST
               value: "weaviate.ba-kovacevic:80"
             - name: WEAVIATE_KEY
diff --git a/Kubernetes/modernizer/modernizer-ingress.yml b/Kubernetes/modernizer/modernizer-ingress.yml
index 46a9025..6716e0f 100644
--- a/Kubernetes/modernizer/modernizer-ingress.yml
+++ b/Kubernetes/modernizer/modernizer-ingress.yml
@@ -1,8 +1,12 @@
 kind: Ingress
 apiVersion: networking.k8s.io/v1
 metadata:
-  name: modernizer-ingress2
-  namespace: ba-kovacevic
+  name: modernizer-ingress
+  annotations:
+    nginx.ingress.kubernetes.io/rewrite-target: /$2$3
+    nginx.ingress.kubernetes.io/use-regex: 'true'
+    nginx.ingress.kubernetes.io/configuration-snippet: |
+      proxy_set_header Referer $http_referer;
 spec:
   tls:
     - hosts:
@@ -12,14 +16,35 @@ spec:
     - host: modernizer.milki-psy.dbis.rwth-aachen.de
       http:
         paths:
-          - path: /(.*)
+          - path: /()(ollama)(.*)
             pathType: Prefix
             backend:
               service:
-                name: modernizer-service
+                name: modernizer-backend-service
                 port:
                   number: 443
+          - path: /()(weaviate)(.*)
+            pathType: Prefix
+            backend:
+              service:
+                name: modernizer-backend-service
+                port:
+                  number: 443
+          - path: /chatbot(/|$)(.*)
+            pathType: Prefix
+            backend:
+              service:
+                name: ollama-webui-service
+                port:
+                  number: 8080
+          - path: /ollama-debug(/|$)(.*)
+            pathType: Prefix
+            backend:
+              service:
+                name: ollama-service
+                port:
+                  number: 11434
 status:
   loadBalancer:
     ingress:
-      - ip: 137.226.232.176
+      - ip: 137.226.232.175
diff --git a/Kubernetes/ollama/ollama-chatbot-deployment.yml b/Kubernetes/ollama/ollama-chatbot-deployment.yml
index eb10069..8e86514 100644
--- a/Kubernetes/ollama/ollama-chatbot-deployment.yml
+++ b/Kubernetes/ollama/ollama-chatbot-deployment.yml
@@ -16,4 +16,4 @@ spec:
     spec:
       containers:
         - name: ollama-chatbot
-          image: ghcr.io/ivanfioravanti/chatbot-ollama:main
\ No newline at end of file
+          image: ghcr.io/ivanfioravanti/chatbot-ollama:main
diff --git a/Kubernetes/ollama/ollama-chatbot-service.yml b/Kubernetes/ollama/ollama-chatbot-service.yml
new file mode 100644
index 0000000..2c59bd8
--- /dev/null
+++ b/Kubernetes/ollama/ollama-chatbot-service.yml
@@ -0,0 +1,14 @@
+---
+
+apiVersion: v1
+kind: Service
+metadata:
+  name: ollama-chatbot-service
+spec:
+  selector:
+    app: ollama-chatbot
+  ports:
+    - protocol: TCP
+      port: 443
+      targetPort: 3000
+  type: ClusterIP
diff --git a/Kubernetes/ollama/ollama-webui-deployment.yml b/Kubernetes/ollama/ollama-webui-deployment.yml
new file mode 100644
index 0000000..81d4b35
--- /dev/null
+++ b/Kubernetes/ollama/ollama-webui-deployment.yml
@@ -0,0 +1,27 @@
+apiVersion: apps/v1
+kind: Deployment
+metadata:
+  name: ollama-webui-deployment
+spec:
+  replicas: 1
+  selector:
+    matchLabels:
+      app: ollama-webui
+  template:
+    metadata:
+      labels:
+        app: ollama-webui
+    spec:
+      containers:
+        - name: ollama-webui
+          image: ghcr.io/ollama-webui/ollama-webui:main
+          ports:
+            - containerPort: 8080
+          resources:
+            limits:
+              cpu: "500m"
+              memory: "500Mi"
+          env:
+            - name: OLLAMA_API_BASE_URL
+              value: "http://ollama-service.ba-kovacevic:11434/api"
+          tty: true
diff --git a/Kubernetes/ollama/ollama-webui-service.yml b/Kubernetes/ollama/ollama-webui-service.yml
new file mode 100644
index 0000000..59a6aa1
--- /dev/null
+++ b/Kubernetes/ollama/ollama-webui-service.yml
@@ -0,0 +1,14 @@
+apiVersion: v1
+kind: Service
+metadata:
+  name: ollama-webui-service
+spec:
+  type: NodePort # Use LoadBalancer if you're on a cloud that supports it
+  selector:
+    app: ollama-webui
+  ports:
+    - protocol: TCP
+      port: 8080
+      targetPort: 8080
+      # If using NodePort, you can optionally specify the nodePort:
+      # nodePort: 30000
diff --git a/main.go b/main.go
index 1ce2f4a..9e0560e 100644
--- a/main.go
+++ b/main.go
@@ -37,7 +37,9 @@ func main() {
 		c.Data(http.StatusOK, "application/json; charset=utf-8", schema)
 	})
 
-	router.Any("/ollama/*proxyPath", proxy)
+	router.Any("/ollama/*proxyPath", proxyLog)
+
+	router.Any("/chatbot/*proxyPath", proxy)
 
 	err = router.Run(":8080")
 	if err != nil {
@@ -45,7 +47,7 @@ func main() {
 	}
 }
 
-func proxy(c *gin.Context) {
+func proxyLog(c *gin.Context) {
 	remote, err := url.Parse(os.Getenv("OLLAMA_URL"))
 	if err != nil {
 		c.JSON(http.StatusInternalServerError, gin.H{"error": "Error parsing remote URL"})
@@ -98,3 +100,22 @@ func (w *responseWriterInterceptor) Write(b []byte) (int, error) {
 	w.BodyInterceptor.Write(b)
 	return w.ResponseWriter.Write(b)
 }
+
+func proxy(c *gin.Context) {
+	remote, err := url.Parse(os.Getenv("WEBUI_URL"))
+	if err != nil {
+		c.JSON(http.StatusInternalServerError, gin.H{"error": "Error parsing remote URL"})
+		return
+	}
+
+	proxy := httputil.NewSingleHostReverseProxy(remote)
+	proxy.Director = func(req *http.Request) {
+		req.Header = c.Request.Header
+		req.Host = remote.Host
+		req.URL.Scheme = remote.Scheme
+		req.URL.Host = remote.Host
+		req.URL.Path = c.Param("proxyPath")
+	}
+
+	proxy.ServeHTTP(c.Writer, c.Request)
+}