Skip to content

Commit

Permalink
add ollama webui with proxy route in modernizer service
Browse files Browse the repository at this point in the history
  • Loading branch information
ili16 committed Jan 11, 2024
1 parent 58ea1d7 commit 43666cc
Show file tree
Hide file tree
Showing 7 changed files with 110 additions and 8 deletions.
2 changes: 2 additions & 0 deletions Kubernetes/modernizer/modernizer-backend-deployment.yml
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,8 @@ spec:
env:
- name: OLLAMA_URL
value: "http://ollama-service.ba-kovacevic:11434"
- name: WEBUI_URL
value: "http://ollama-webui-service.ba-kovacevic:8080"
- name: WEAVIATE_HOST
value: "weaviate.ba-kovacevic:80"
- name: WEAVIATE_KEY
Expand Down
35 changes: 30 additions & 5 deletions Kubernetes/modernizer/modernizer-ingress.yml
Original file line number Diff line number Diff line change
@@ -1,8 +1,12 @@
kind: Ingress
apiVersion: networking.k8s.io/v1
metadata:
name: modernizer-ingress2
namespace: ba-kovacevic
name: modernizer-ingress
annotations:
nginx.ingress.kubernetes.io/rewrite-target: /$2$3
nginx.ingress.kubernetes.io/use-regex: 'true'
nginx.ingress.kubernetes.io/configuration-snippet: |
proxy_set_header Referer $http_referer;
spec:
tls:
- hosts:
Expand All @@ -12,14 +16,35 @@ spec:
- host: modernizer.milki-psy.dbis.rwth-aachen.de
http:
paths:
- path: /(.*)
- path: /()(ollama)(.*)
pathType: Prefix
backend:
service:
name: modernizer-service
name: modernizer-backend-service
port:
number: 443
- path: /()(weaviate)(.*)
pathType: Prefix
backend:
service:
name: modernizer-backend-service
port:
number: 443
- path: /chatbot(/|$)(.*)
pathType: Prefix
backend:
service:
name: ollama-webui-service
port:
number: 8080
- path: /ollama-debug(/|$)(.*)
pathType: Prefix
backend:
service:
name: ollama-service
port:
number: 11434
status:
loadBalancer:
ingress:
- ip: 137.226.232.176
- ip: 137.226.232.175
2 changes: 1 addition & 1 deletion Kubernetes/ollama/ollama-chatbot-deployment.yml
Original file line number Diff line number Diff line change
Expand Up @@ -16,4 +16,4 @@ spec:
spec:
containers:
- name: ollama-chatbot
image: ghcr.io/ivanfioravanti/chatbot-ollama:main
image: ghcr.io/ivanfioravanti/chatbot-ollama:main
14 changes: 14 additions & 0 deletions Kubernetes/ollama/ollama-chatbot-service.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,14 @@
---

# Service exposing the ollama-chatbot Deployment inside the cluster.
# Traffic arriving on port 443 is forwarded to the chatbot container's
# HTTP port 3000 (NOTE(review): port 443 here is plain TCP, not TLS —
# TLS termination, if any, happens at the ingress; confirm intent).
apiVersion: v1
kind: Service
metadata:
  name: ollama-chatbot-service
spec:
  selector:
    app: ollama-chatbot          # matches the pod label set by ollama-chatbot-deployment.yml
  ports:
    - protocol: TCP
      port: 443                  # port other cluster workloads connect to
      targetPort: 3000           # container port the chatbot listens on
  type: ClusterIP                # internal-only; external access goes through the ingress
27 changes: 27 additions & 0 deletions Kubernetes/ollama/ollama-webui-deployment.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,27 @@
# Deployment running a single replica of the Ollama WebUI frontend.
# The UI talks to the Ollama backend through OLLAMA_API_BASE_URL.
apiVersion: apps/v1
kind: Deployment
metadata:
  name: ollama-webui-deployment
spec:
  replicas: 1
  selector:
    matchLabels:
      app: ollama-webui
  template:
    metadata:
      labels:
        app: ollama-webui        # selected by ollama-webui-service
    spec:
      containers:
        - name: ollama-webui
          image: ghcr.io/ollama-webui/ollama-webui:main
          ports:
            - containerPort: 8080   # WebUI HTTP port, exposed via ollama-webui-service
          resources:
            limits:
              cpu: "500m"
              memory: "500Mi"
          env:
            # Points the WebUI at the in-cluster Ollama API endpoint
            # (service "ollama-service" in namespace "ba-kovacevic").
            - name: OLLAMA_API_BASE_URL
              value: "http://ollama-service.ba-kovacevic:11434/api"
          tty: true
14 changes: 14 additions & 0 deletions Kubernetes/ollama/ollama-webui-service.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,14 @@
# Service exposing the Ollama WebUI pods on port 8080.
# NOTE(review): type is NodePort, but the ingress (modernizer-ingress.yml)
# already routes /chatbot traffic here — ClusterIP may suffice; confirm
# whether direct node access is actually needed.
apiVersion: v1
kind: Service
metadata:
  name: ollama-webui-service
spec:
  type: NodePort # Use LoadBalancer if you're on a cloud that supports it
  selector:
    app: ollama-webui            # matches the pod label set by ollama-webui-deployment.yml
  ports:
    - protocol: TCP
      port: 8080                 # service port
      targetPort: 8080           # container port (WebUI listens on 8080)
      # If using NodePort, you can optionally specify the nodePort:
      # nodePort: 30000
24 changes: 22 additions & 2 deletions main.go
Original file line number Diff line number Diff line change
Expand Up @@ -37,15 +37,17 @@ func main() {
c.Data(http.StatusOK, "application/json; charset=utf-8", schema)
})

router.Any("/ollama/*proxyPath", proxy)
router.Any("/ollama/*proxyPath", proxyLog)

router.Any("/chatbot/*proxyPath", proxy)

err = router.Run(":8080")
if err != nil {
return
}
}

func proxy(c *gin.Context) {
func proxyLog(c *gin.Context) {
remote, err := url.Parse(os.Getenv("OLLAMA_URL"))
if err != nil {
c.JSON(http.StatusInternalServerError, gin.H{"error": "Error parsing remote URL"})
Expand Down Expand Up @@ -98,3 +100,21 @@ func (w *responseWriterInterceptor) Write(b []byte) (int, error) {
w.BodyInterceptor.Write(b)
return w.ResponseWriter.Write(b)
}

// proxy forwards the incoming request to the Ollama WebUI whose base URL is
// taken from the WEBUI_URL environment variable. It is registered for the
// "/chatbot/*proxyPath" route; the wildcard segment (including its leading
// slash) becomes the upstream request path.
//
// On a malformed WEBUI_URL it responds with HTTP 500 and a JSON error body,
// matching the error handling style of proxyLog, instead of panicking —
// a panic in a handler would be turned into a 500 by gin's recovery
// middleware at best, and crash the process at worst.
func proxy(c *gin.Context) {
	remote, err := url.Parse(os.Getenv("WEBUI_URL"))
	if err != nil {
		c.JSON(http.StatusInternalServerError, gin.H{"error": "Error parsing remote URL"})
		return
	}

	// Renamed from "proxy" to avoid shadowing this function's own name.
	rp := httputil.NewSingleHostReverseProxy(remote)
	rp.Director = func(req *http.Request) {
		req.Header = c.Request.Header // forward client headers as-is
		req.Host = remote.Host
		req.URL.Scheme = remote.Scheme
		req.URL.Host = remote.Host
		// c.Param("proxyPath") is everything matched by the route wildcard,
		// e.g. "/index.html" for a request to "/chatbot/index.html".
		req.URL.Path = c.Param("proxyPath")
	}

	rp.ServeHTTP(c.Writer, c.Request)
}

0 comments on commit 43666cc

Please sign in to comment.