
Ingress Overview
An Ingress is essentially a Layer 7 load balancer (an extra L7 abstraction layered in front of Services): a single NodePort or load balancer is enough to expose every Service to the outside world.
An Ingress setup consists of two parts:
- ingress controller: the component that actually load-balances traffic according to the rules. It watches for Ingress changes and writes the addresses of the matching Service's Endpoints into the load balancer configuration, rather than the Service address itself, which removes one layer of Service forwarding (see the sketch after this list). Common implementations include:
  - nginx
  - contour
  - haproxy
  - traefik
  - istio
- ingress: the resource that defines the concrete traffic-matching rules
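To make the "Endpoints, not the Service IP" point concrete, the configuration an nginx-based controller renders looks roughly like the sketch below. This is purely illustrative: the upstream naming, server names and pod IPs are placeholders, not output from any specific controller version.
# conceptual sketch of what an nginx-based controller writes into its config
upstream default-myapp-service-80 {
    # pod Endpoints are listed directly as upstream servers,
    # so traffic skips the Service ClusterIP / kube-proxy hop
    server 10.100.1.11:80;
    server 10.100.1.12:80;
    server 10.100.1.13:80;
}
server {
    listen 80;
    server_name myapp.example.com;
    location / {
        proxy_pass http://default-myapp-service-80;
    }
}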
Installing Ingress
Ingress controller deployment options:
- Deployment + LoadBalancer: best suited to cloud providers
- Deployment + NodePort: suited to environments where node IPs are relatively fixed; because traffic goes through a NodePort there is an extra layer of NAT, which can become a bottleneck under heavy load (see the sketch after this list)
- DaemonSet + HostNetwork: binds host ports directly, so it performs better than NodePort, but only one controller pod can run per node; a good fit for high-concurrency scenarios
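For comparison, the Deployment + NodePort option above fronts the controller with a NodePort Service roughly like the minimal sketch below (the Service name and nodePort value here are made up for illustration); the node-port DNAT is where the extra NAT layer comes from:
apiVersion: v1
kind: Service
metadata:
  name: nginx-ingress-nodeport    # hypothetical name
  namespace: nginx-ingress
spec:
  type: NodePort
  selector:
    app: nginx-ingress
  ports:
  - name: http
    port: 80
    targetPort: 80
    nodePort: 30080               # example value; client traffic hits node:30080 and is DNAT'ed to the controller pod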
Deploying with DaemonSet + HostNetwork
[root@node1 ingress]# cat >nginx-ingress.yaml<<'EOF'
# For production use, see https://github.com/nginxinc/kubernetes-ingress/blob/v1.5.5/docs/installation.md and customize further for your environment
apiVersion: v1
kind: Namespace
metadata:
  name: nginx-ingress
---
apiVersion: v1
kind: ServiceAccount
metadata:
  name: nginx-ingress
  namespace: nginx-ingress
---
apiVersion: v1
kind: Secret
metadata:
  name: default-server-secret
  namespace: nginx-ingress
type: Opaque
data:
  tls.crt: LS0tLS1CRUdJTiBDRVJUSUZJQ0FURS0tLS0tCk1JSUN2akNDQWFZQ0NRREFPRjl0THNhWFhEQU5CZ2txaGtpRzl3MEJBUXNGQURBaE1SOHdIUVlEVlFRRERCWk8KUjBsT1dFbHVaM0psYzNORGIyNTBjbTlzYkdWeU1CNFhEVEU0TURreE1qRTRNRE16TlZvWERUSXpNRGt4TVRFNApNRE16TlZvd0lURWZNQjBHQTFVRUF3d1dUa2RKVGxoSmJtZHlaWE56UTI5dWRISnZiR3hsY2pDQ0FTSXdEUVlKCktvWklodmNOQVFFQkJRQURnZ0VQQURDQ0FRb0NnZ0VCQUwvN2hIUEtFWGRMdjNyaUM3QlBrMTNpWkt5eTlyQ08KR2xZUXYyK2EzUDF0azIrS3YwVGF5aGRCbDRrcnNUcTZzZm8vWUk1Y2Vhbkw4WGM3U1pyQkVRYm9EN2REbWs1Qgo4eDZLS2xHWU5IWlg0Rm5UZ0VPaStlM2ptTFFxRlBSY1kzVnNPazFFeUZBL0JnWlJVbkNHZUtGeERSN0tQdGhyCmtqSXVuektURXUyaDU4Tlp0S21ScUJHdDEwcTNRYzhZT3ExM2FnbmovUWRjc0ZYYTJnMjB1K1lYZDdoZ3krZksKWk4vVUkxQUQ0YzZyM1lma1ZWUmVHd1lxQVp1WXN2V0RKbW1GNWRwdEMzN011cDBPRUxVTExSakZJOTZXNXIwSAo1TmdPc25NWFJNV1hYVlpiNWRxT3R0SmRtS3FhZ25TZ1JQQVpQN2MwQjFQU2FqYzZjNGZRVXpNQ0F3RUFBVEFOCkJna3Foa2lHOXcwQkFRc0ZBQU9DQVFFQWpLb2tRdGRPcEsrTzhibWVPc3lySmdJSXJycVFVY2ZOUitjb0hZVUoKdGhrYnhITFMzR3VBTWI5dm15VExPY2xxeC9aYzJPblEwMEJCLzlTb0swcitFZ1U2UlVrRWtWcitTTFA3NTdUWgozZWI4dmdPdEduMS9ienM3bzNBaS9kclkrcUI5Q2k1S3lPc3FHTG1US2xFaUtOYkcyR1ZyTWxjS0ZYQU80YTY3Cklnc1hzYktNbTQwV1U3cG9mcGltU1ZmaXFSdkV5YmN3N0NYODF6cFErUyt1eHRYK2VBZ3V0NHh3VlI5d2IyVXYKelhuZk9HbWhWNThDd1dIQnNKa0kxNXhaa2VUWXdSN0diaEFMSkZUUkk3dkhvQXprTWIzbjAxQjQyWjNrN3RXNQpJUDFmTlpIOFUvOWxiUHNoT21FRFZkdjF5ZytVRVJxbStGSis2R0oxeFJGcGZnPT0KLS0tLS1FTkQgQ0VSVElGSUNBVEUtLS0tLQo=
  tls.key: LS0tLS1CRUdJTiBSU0EgUFJJVkFURSBLRVktLS0tLQpNSUlFcEFJQkFBS0NBUUVBdi91RWM4b1JkMHUvZXVJTHNFK1RYZUprckxMMnNJNGFWaEMvYjVyYy9XMlRiNHEvClJOcktGMEdYaVN1eE9ycXgrajlnamx4NXFjdnhkenRKbXNFUkJ1Z1B0ME9hVGtIekhvb3FVWmcwZGxmZ1dkT0EKUTZMNTdlT1l0Q29VOUZ4amRXdzZUVVRJVUQ4R0JsRlNjSVo0b1hFTkhzbysyR3VTTWk2Zk1wTVM3YUhudzFtMApxWkdvRWEzWFNyZEJ6eGc2clhkcUNlUDlCMXl3VmRyYURiUzc1aGQzdUdETDU4cGszOVFqVUFQaHpxdmRoK1JWClZGNGJCaW9CbTVpeTlZTW1hWVhsMm0wTGZzeTZuUTRRdFFzdEdNVWozcGJtdlFmazJBNnljeGRFeFpkZFZsdmwKMm82MjBsMllxcHFDZEtCRThCay90elFIVTlKcU56cHpoOUJUTXdJREFRQUJBb0lCQVFDZklHbXowOHhRVmorNwpLZnZJUXQwQ0YzR2MxNld6eDhVNml4MHg4Mm15d1kxUUNlL3BzWE9LZlRxT1h1SENyUlp5TnUvZ2IvUUQ4bUFOCmxOMjRZTWl0TWRJODg5TEZoTkp3QU5OODJDeTczckM5bzVvUDlkazAvYzRIbjAzSkVYNzZ5QjgzQm9rR1FvYksKMjhMNk0rdHUzUmFqNjd6Vmc2d2szaEhrU0pXSzBwV1YrSjdrUkRWYmhDYUZhNk5nMUZNRWxhTlozVDhhUUtyQgpDUDNDeEFTdjYxWTk5TEI4KzNXWVFIK3NYaTVGM01pYVNBZ1BkQUk3WEh1dXFET1lvMU5PL0JoSGt1aVg2QnRtCnorNTZud2pZMy8yUytSRmNBc3JMTnIwMDJZZi9oY0IraVlDNzVWYmcydVd6WTY3TWdOTGQ5VW9RU3BDRkYrVm4KM0cyUnhybnhBb0dCQU40U3M0ZVlPU2huMVpQQjdhTUZsY0k2RHR2S2ErTGZTTXFyY2pOZjJlSEpZNnhubmxKdgpGenpGL2RiVWVTbWxSekR0WkdlcXZXaHFISy9iTjIyeWJhOU1WMDlRQ0JFTk5jNmtWajJTVHpUWkJVbEx4QzYrCk93Z0wyZHhKendWelU0VC84ajdHalRUN05BZVpFS2FvRHFyRG5BYWkyaW5oZU1JVWZHRXFGKzJyQW9HQkFOMVAKK0tZL0lsS3RWRzRKSklQNzBjUis3RmpyeXJpY05iWCtQVzUvOXFHaWxnY2grZ3l4b25BWlBpd2NpeDN3QVpGdwpaZC96ZFB2aTBkWEppc1BSZjRMazg5b2pCUmpiRmRmc2l5UmJYbyt3TFU4NUhRU2NGMnN5aUFPaTVBRHdVU0FkCm45YWFweUNweEFkREtERHdObit3ZFhtaTZ0OHRpSFRkK3RoVDhkaVpBb0dCQUt6Wis1bG9OOTBtYlF4VVh5YUwKMjFSUm9tMGJjcndsTmVCaWNFSmlzaEhYa2xpSVVxZ3hSZklNM2hhUVRUcklKZENFaHFsV01aV0xPb2I2NTNyZgo3aFlMSXM1ZUtka3o0aFRVdnpldm9TMHVXcm9CV2xOVHlGanIrSWhKZnZUc0hpOGdsU3FkbXgySkJhZUFVWUNXCndNdlQ4NmNLclNyNkQrZG8wS05FZzFsL0FvR0FlMkFVdHVFbFNqLzBmRzgrV3hHc1RFV1JqclRNUzRSUjhRWXQKeXdjdFA4aDZxTGxKUTRCWGxQU05rMXZLTmtOUkxIb2pZT2pCQTViYjhibXNVU1BlV09NNENoaFJ4QnlHbmR2eAphYkJDRkFwY0IvbEg4d1R0alVZYlN5T294ZGt5OEp0ek90ajJhS0FiZHd6NlArWDZDODhjZmxYVFo5MWpYL3RMCjF3TmRKS2tDZ1lCbyt0UzB5TzJ2SWFmK2UwSkN5TGhzVDQ5cTN3Zis2QWVqWGx2WDJ1VnRYejN5QTZnbXo5aCsKcDNlK2JMRUxwb3B0WFhNdUFRR0xhUkcrYlNNcjR5dERYbE5ZSndUeThXczNKY3dlSTdqZVp2b0ZpbmNvVlVIMwphdmxoTUVCRGYxSjltSDB5cDBwWUNaS2ROdHNvZEZtQktzVEtQMjJhTmtsVVhCS3gyZzR6cFE9PQotLS0tLUVORCBSU0EgUFJJVkFURSBLRVktLS0tLQo=
---
kind: ConfigMap
apiVersion: v1
metadata:
  name: nginx-config
  namespace: nginx-ingress
data:
  server-names-hash-bucket-size: "1024"
---
kind: ClusterRole
apiVersion: rbac.authorization.k8s.io/v1
metadata:
  name: nginx-ingress
rules:
- apiGroups:
  - ""
  resources:
  - services
  - endpoints
  verbs:
  - get
  - list
  - watch
- apiGroups:
  - ""
  resources:
  - secrets
  verbs:
  - get
  - list
  - watch
- apiGroups:
  - ""
  resources:
  - configmaps
  verbs:
  - get
  - list
  - watch
  - update
  - create
- apiGroups:
  - ""
  resources:
  - pods
  verbs:
  - list
- apiGroups:
  - ""
  resources:
  - events
  verbs:
  - create
  - patch
- apiGroups:
  - extensions
  resources:
  - ingresses
  verbs:
  - list
  - watch
  - get
- apiGroups:
  - "extensions"
  resources:
  - ingresses/status
  verbs:
  - update
- apiGroups:
  - k8s.nginx.org
  resources:
  - virtualservers
  - virtualserverroutes
  verbs:
  - list
  - watch
  - get
---
kind: ClusterRoleBinding
apiVersion: rbac.authorization.k8s.io/v1
metadata:
  name: nginx-ingress
subjects:
- kind: ServiceAccount
  name: nginx-ingress
  namespace: nginx-ingress
roleRef:
  kind: ClusterRole
  name: nginx-ingress
  apiGroup: rbac.authorization.k8s.io
---
apiVersion: apps/v1
kind: DaemonSet
metadata:
  name: nginx-ingress
  namespace: nginx-ingress
  annotations:
    prometheus.io/scrape: "true"
    prometheus.io/port: "9113"
spec:
  selector:
    matchLabels:
      app: nginx-ingress
  template:
    metadata:
      labels:
        app: nginx-ingress
    spec:
      serviceAccountName: nginx-ingress
      containers:
      - image: nginx/nginx-ingress:1.5.5
        name: nginx-ingress
        ports:
        - name: http
          containerPort: 80
          hostPort: 80
        - name: https
          containerPort: 443
          hostPort: 443
        - name: prometheus
          containerPort: 9113
        env:
        - name: POD_NAMESPACE
          valueFrom:
            fieldRef:
              fieldPath: metadata.namespace
        - name: POD_NAME
          valueFrom:
            fieldRef:
              fieldPath: metadata.name
        args:
          - -nginx-configmaps=$(POD_NAMESPACE)/nginx-config
          - -default-server-tls-secret=$(POD_NAMESPACE)/default-server-secret
          #- -v=3 # Enables extensive logging. Useful for troubleshooting.
          #- -report-ingress-status
          #- -external-service=nginx-ingress
          #- -enable-leader-election
          - -enable-prometheus-metrics
          #- -enable-custom-resources
EOF
[root@node1 ingress]# kubectl apply -f nginx-ingress.yaml
namespace/nginx-ingress created
serviceaccount/nginx-ingress created
secret/default-server-secret created
configmap/nginx-config created
clusterrole.rbac.authorization.k8s.io/nginx-ingress created
clusterrolebinding.rbac.authorization.k8s.io/nginx-ingress created
daemonset.apps/nginx-ingress created
[root@node1 ingress]# kubectl get -f nginx-ingress.yaml
NAME STATUS AGE
namespace/nginx-ingress Active 84s
NAME SECRETS AGE
serviceaccount/nginx-ingress 1 84s
NAME TYPE DATA AGE
secret/default-server-secret Opaque 2 84s
NAME DATA AGE
configmap/nginx-config 1 84s
NAME CREATED AT
clusterrole.rbac.authorization.k8s.io/nginx-ingress 2022-05-07T16:28:34Z
NAME ROLE AGE
clusterrolebinding.rbac.authorization.k8s.io/nginx-ingress ClusterRole/nginx-ingress 84s
NAME DESIRED CURRENT READY UP-TO-DATE AVAILABLE NODE SELECTOR AGE
daemonset.apps/nginx-ingress 1 1 1 1 1 <none> 84s
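Before moving on, it is worth confirming that the controller pod is up and actually bound to the host ports. The commands below are a suggested check (pod names and output will differ in your environment):
[root@node1 ingress]# kubectl get pods -n nginx-ingress -o wide
[root@node1 ingress]# curl -I http://127.0.0.1/    # run on the node; with no Ingress rules yet, the controller's default server should answer (typically a 404)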
Using Ingress
Create the Deployment
[root@node1 ingress]# cat >nginx-deployment.yaml<<'EOF'
apiVersion: apps/v1
kind: Deployment
metadata:
  name: nginx1
  labels:
    app: nginx1
spec:
  replicas: 3
  selector:
    matchLabels:
      app: nginx1
  template:
    metadata:
      labels:
        app: nginx1
    spec:
      initContainers:
      - name: init-container
        image: busybox:latest
        imagePullPolicy: IfNotPresent
        command: ["sh"]
        env:
        # - name: MY_POD_NAME
        #   valueFrom:
        #     fieldRef:
        #       fieldPath: metadata.name
        - name: MY_POD_IP
          valueFrom:
            fieldRef:
              fieldPath: status.podIP
        args:
          [
            "-c",
            "echo ${HOSTNAME} ${MY_POD_IP} > /wwwroot/index.html",
          ]
        volumeMounts:
        - name: wwwroot
          mountPath: "/wwwroot"
      containers:
      - name: nginx
        image: nginx:latest
        ports:
        - containerPort: 80
          protocol: TCP
        volumeMounts:
        - name: wwwroot
          mountPath: /usr/share/nginx/html/index.html
          subPath: index.html
      volumes:
      - name: wwwroot
        emptyDir: {}
EOF
[root@node1 ingress]# kubectl apply -f nginx-deployment.yaml
deployment.apps/nginx1 created
[root@node1 ingress]# kubectl get pods -l app=nginx1 -o wide
NAME READY STATUS RESTARTS AGE IP NODE NOMINATED NODE READINESS GATES
nginx1-859486d7bb-7hc5b 1/1 Running 0 8m10s 10.100.166.169 node1 <none> <none>
nginx1-859486d7bb-jcdlr 1/1 Running 0 8m10s 10.100.166.184 node1 <none> <none>
nginx1-859486d7bb-z2sml 1/1 Running 0 8m10s 10.100.166.171 node1 <none> <none>
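Optionally, confirm that the init container wrote the per-pod page by reading the file inside one of the pods (pod name taken from the listing above; adjust to your own):
[root@node1 ingress]# kubectl exec nginx1-859486d7bb-7hc5b -- cat /usr/share/nginx/html/index.html
# expected content: "<pod hostname> <pod IP>", e.g. nginx1-859486d7bb-7hc5b 10.100.166.169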
Create the Service
[root@node1 ingress]# cat >nginx-service.yaml<<'EOF'
apiVersion: v1
kind: Service
metadata:
  name: nginx1-service
  labels:
    app: nginx1-service
spec:
  ports:
  - port: 80
    targetPort: 80
    name: nginx1-service
  selector:
    app: nginx1
EOF
[root@node1 ingress]# kubectl apply -f nginx-service.yaml
service/nginx1-service created
[root@node1 ingress]# kubectl get svc
NAME TYPE CLUSTER-IP EXTERNAL-IP PORT(S) AGE
kubernetes ClusterIP 10.96.0.1 <none> 443/TCP 2d1h
nginx1-service ClusterIP 10.96.124.204 <none> 80/TCP 5s
[root@node1 ingress]# kubectl describe svc nginx1-service
Name: nginx1-service
Namespace: default
Labels: app=nginx1-service
Annotations: Selector: app=nginx1
Type: ClusterIP
IP: 10.96.124.204
Port: nginx1-service 80/TCP
TargetPort: 80/TCP
Endpoints: 10.100.166.169:80,10.100.166.171:80,10.100.166.184:80
Session Affinity: None
Events: <none>
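Before adding the Ingress, a quick sanity check from any cluster node against the ClusterIP shown above verifies that the Service and its Endpoints work:
[root@node1 ingress]# curl 10.96.124.204
# should return "<pod hostname> <pod IP>" from one of the three pods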
Create the Ingress
[root@node1 ingress]# cat ingress-demo.yaml
apiVersion: extensions/v1beta1
kind: Ingress
metadata:
  name: ingress-demo
  namespace: default
spec:
  rules:
  - host: demo.xadocker.cn
    http:
      paths:
      - path: /
        backend:
          serviceName: nginx1-service
          servicePort: 80
[root@node1 ingress]# kubectl apply -f ingress-demo.yaml
ingress.extensions/ingress-demo created
[root@node1 ingress]#
[root@node1 ingress]# kubectl get -f ingress-demo.yaml
NAME CLASS HOSTS ADDRESS PORTS AGE
ingress-demo <none> demo.xadocker.cn 80 7s
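Note that extensions/v1beta1 matches the controller version used in this post but was removed in Kubernetes 1.22. On a newer cluster the equivalent manifest under networking.k8s.io/v1 would look roughly like this (a sketch; the ingressClassName value depends on how your controller registers its IngressClass):
apiVersion: networking.k8s.io/v1
kind: Ingress
metadata:
  name: ingress-demo
  namespace: default
spec:
  ingressClassName: nginx          # assumed class name; adjust to your controller setup
  rules:
  - host: demo.xadocker.cn
    http:
      paths:
      - path: /
        pathType: Prefix
        backend:
          service:
            name: nginx1-service
            port:
              number: 80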
Inspect the ingress controller logs
Reload flow: the nginx master receives SIGHUP => it reloads the configuration and starts new worker processes => the old worker processes shut down gracefully
[root@node1 ingress]# kubectl logs nginx-ingress-k2skb -n nginx-ingress
.....
2021/04/01 16:48:17 [notice] 18#18: worker process 49 exited with code 0
2021/04/01 16:48:17 [notice] 18#18: signal 29 (SIGIO) received
I0507 16:48:17.408500 1 event.go:209] Event(v1.ObjectReference{Kind:"Ingress", Namespace:"default", Name:"ingress-demo", UID:"4984700b-75b5-4ede-8c9f-6ddb0fd1a121", APIVersion:"extensions/v1beta1", ResourceVersion:"196905", FieldPath:""}): type: 'Normal' reason: 'AddedOrUpdated' Configuration for default/ingress-demo was added or updated
.....
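Besides the event log, you can also dump the configuration the controller rendered for this Ingress and confirm that the upstream servers are the pod Endpoints. The path below assumes the nginxinc controller's <namespace>-<name>.conf naming under /etc/nginx/conf.d; adjust if your layout differs:
[root@node1 ingress]# kubectl exec -n nginx-ingress nginx-ingress-k2skb -- cat /etc/nginx/conf.d/default-ingress-demo.conf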
# Scale up the demo Deployment's replicas
[root@node1 ingress]# kubectl scale deployment nginx1 --replicas=5
# Check the logs again: config reload => new worker processes start => old worker processes shut down gracefully after the master receives SIGHUP
[root@node1 ingress]# kubectl logs nginx-ingress-k2skb -n nginx-ingress
.....
2021/04/01 17:03:54 [notice] 63#63: signal process started
2021/04/01 17:03:54 [notice] 18#18: signal 1 (SIGHUP) received from 63, reconfiguring
2021/04/01 17:03:54 [notice] 18#18: reconfiguring
2021/04/01 17:03:54 [notice] 18#18: using the "epoll" event method
2021/04/01 17:03:54 [notice] 18#18: start worker processes
2021/04/01 17:03:54 [notice] 18#18: start worker process 64
2021/04/01 17:03:54 [notice] 18#18: start worker process 65
2021/04/01 17:03:54 [notice] 18#18: start worker process 66
2021/04/01 17:03:54 [notice] 18#18: start worker process 67
2021/04/01 17:03:54 [notice] 18#18: start worker process 68
2021/04/01 17:03:54 [notice] 18#18: start worker process 69
2021/04/01 17:03:54 [notice] 18#18: start worker process 70
2021/04/01 17:03:54 [notice] 18#18: start worker process 71
2021/04/01 17:03:55 [notice] 56#56: gracefully shutting down
2021/04/01 17:03:55 [notice] 58#58: gracefully shutting down
2021/04/01 17:03:55 [notice] 57#57: gracefully shutting down
2021/04/01 17:03:55 [notice] 56#56: exiting
2021/04/01 17:03:55 [notice] 59#59: gracefully shutting down
2021/04/01 17:03:55 [notice] 57#57: exiting
2021/04/01 17:03:55 [notice] 60#60: gracefully shutting down
2021/04/01 17:03:55 [notice] 58#58: exiting
2021/04/01 17:03:55 [notice] 60#60: exiting
2021/04/01 17:03:55 [notice] 59#59: exiting
2021/04/01 17:03:55 [notice] 57#57: exit
2021/04/01 17:03:55 [notice] 58#58: exit
2021/04/01 17:03:55 [notice] 56#56: exit
2021/04/01 17:03:55 [notice] 60#60: exit
2021/04/01 17:03:55 [notice] 59#59: exit
2021/04/01 17:03:55 [notice] 54#54: gracefully shutting down
2021/04/01 17:03:55 [notice] 61#61: gracefully shutting down
2021/04/01 17:03:55 [notice] 61#61: exiting
2021/04/01 17:03:55 [notice] 54#54: exiting
2021/04/01 17:03:55 [notice] 61#61: exit
2021/04/01 17:03:55 [notice] 54#54: exit
2021/04/01 17:03:55 [notice] 55#55: gracefully shutting down
2021/04/01 17:03:55 [notice] 55#55: exiting
2021/04/01 17:03:55 [notice] 55#55: exit
2021/04/01 17:03:55 [notice] 18#18: signal 17 (SIGCHLD) received from 61
2021/04/01 17:03:55 [notice] 18#18: worker process 61 exited with code 0
2021/04/01 17:03:55 [notice] 18#18: signal 29 (SIGIO) received
2021/04/01 17:03:55 [notice] 18#18: signal 17 (SIGCHLD) received from 54
2021/04/01 17:03:55 [notice] 18#18: worker process 54 exited with code 0
2021/04/01 17:03:55 [notice] 18#18: worker process 55 exited with code 0
2021/04/01 17:03:55 [notice] 18#18: signal 29 (SIGIO) received
2021/04/01 17:03:55 [notice] 18#18: signal 17 (SIGCHLD) received from 56
2021/04/01 17:03:55 [notice] 18#18: worker process 56 exited with code 0
2021/04/01 17:03:55 [notice] 18#18: signal 29 (SIGIO) received
2021/04/01 17:03:55 [notice] 18#18: signal 17 (SIGCHLD) received from 59
2021/04/01 17:03:55 [notice] 18#18: worker process 57 exited with code 0
2021/04/01 17:03:55 [notice] 18#18: worker process 59 exited with code 0
2021/04/01 17:03:55 [notice] 18#18: worker process 60 exited with code 0
2021/04/01 17:03:55 [notice] 18#18: worker process 58 exited with code 0
2021/04/01 17:03:55 [notice] 18#18: signal 29 (SIGIO) received
2021/04/01 17:03:55 [notice] 18#18: signal 17 (SIGCHLD) received from 58
2021/04/01 17:03:55 [notice] 73#73: signal process started
2021/04/01 17:03:55 [notice] 18#18: signal 1 (SIGHUP) received from 73, reconfiguring
2021/04/01 17:03:55 [notice] 18#18: reconfiguring
2021/04/01 17:03:55 [notice] 18#18: using the "epoll" event method
2021/04/01 17:03:55 [notice] 18#18: start worker processes
2021/04/01 17:03:55 [notice] 18#18: start worker process 74
2021/04/01 17:03:55 [notice] 18#18: start worker process 75
2021/04/01 17:03:55 [notice] 18#18: start worker process 76
2021/04/01 17:03:55 [notice] 18#18: start worker process 77
2021/04/01 17:03:55 [notice] 18#18: start worker process 78
2021/04/01 17:03:55 [notice] 18#18: start worker process 79
2021/04/01 17:03:55 [notice] 18#18: start worker process 80
2021/04/01 17:03:55 [notice] 18#18: start worker process 81
2021/04/01 17:03:55 [notice] 64#64: gracefully shutting down
2021/04/01 17:03:55 [notice] 65#65: gracefully shutting down
2021/04/01 17:03:55 [notice] 67#67: gracefully shutting down
2021/04/01 17:03:55 [notice] 66#66: gracefully shutting down
2021/04/01 17:03:55 [notice] 64#64: exiting
2021/04/01 17:03:55 [notice] 65#65: exiting
2021/04/01 17:03:55 [notice] 71#71: gracefully shutting down
2021/04/01 17:03:55 [notice] 66#66: exiting
2021/04/01 17:03:55 [notice] 71#71: exiting
2021/04/01 17:03:55 [notice] 67#67: exiting
2021/04/01 17:03:55 [notice] 65#65: exit
2021/04/01 17:03:55 [notice] 68#68: gracefully shutting down
2021/04/01 17:03:55 [notice] 66#66: exit
2021/04/01 17:03:55 [notice] 71#71: exit
2021/04/01 17:03:55 [notice] 68#68: exiting
2021/04/01 17:03:55 [notice] 67#67: exit
2021/04/01 17:03:55 [notice] 64#64: exit
2021/04/01 17:03:55 [notice] 69#69: gracefully shutting down
2021/04/01 17:03:55 [notice] 70#70: gracefully shutting down
2021/04/01 17:03:55 [notice] 69#69: exiting
2021/04/01 17:03:55 [notice] 70#70: exiting
2021/04/01 17:03:55 [notice] 68#68: exit
2021/04/01 17:03:55 [notice] 69#69: exit
2021/04/01 17:03:55 [notice] 70#70: exit
2021/04/01 17:03:55 [notice] 18#18: signal 17 (SIGCHLD) received from 65
2021/04/01 17:03:55 [notice] 18#18: worker process 65 exited with code 0
2021/04/01 17:03:55 [notice] 18#18: worker process 67 exited with code 0
2021/04/01 17:03:55 [notice] 18#18: signal 29 (SIGIO) received
2021/04/01 17:03:55 [notice] 18#18: signal 17 (SIGCHLD) received from 64
2021/04/01 17:03:55 [notice] 18#18: worker process 64 exited with code 0
2021/04/01 17:03:55 [notice] 18#18: worker process 66 exited with code 0
2021/04/01 17:03:55 [notice] 18#18: worker process 69 exited with code 0
2021/04/01 17:03:55 [notice] 18#18: worker process 70 exited with code 0
2021/04/01 17:03:55 [notice] 18#18: worker process 71 exited with code 0
2021/04/01 17:03:55 [notice] 18#18: signal 29 (SIGIO) received
2021/04/01 17:03:55 [notice] 18#18: signal 17 (SIGCHLD) received from 68
2021/04/01 17:03:55 [notice] 18#18: worker process 68 exited with code 0
2021/04/01 17:03:55 [notice] 18#18: signal 29 (SIGIO) received
2021/04/01 17:04:11 [notice] 83#83: signal process started
2021/04/01 17:04:11 [notice] 18#18: signal 1 (SIGHUP) received from 83, reconfiguring
2021/04/01 17:04:11 [notice] 18#18: reconfiguring
2021/04/01 17:04:11 [notice] 18#18: using the "epoll" event method
2021/04/01 17:04:11 [notice] 18#18: start worker processes
2021/04/01 17:04:11 [notice] 18#18: start worker process 84
2021/04/01 17:04:11 [notice] 18#18: start worker process 85
2021/04/01 17:04:11 [notice] 18#18: start worker process 86
2021/04/01 17:04:11 [notice] 18#18: start worker process 87
2021/04/01 17:04:11 [notice] 18#18: start worker process 88
2021/04/01 17:04:11 [notice] 18#18: start worker process 89
2021/04/01 17:04:11 [notice] 18#18: start worker process 90
2021/04/01 17:04:11 [notice] 18#18: start worker process 91
2021/04/01 17:04:11 [notice] 80#80: gracefully shutting down
2021/04/01 17:04:11 [notice] 79#79: gracefully shutting down
2021/04/01 17:04:11 [notice] 79#79: exiting
2021/04/01 17:04:11 [notice] 80#80: exiting
2021/04/01 17:04:11 [notice] 74#74: gracefully shutting down
2021/04/01 17:04:11 [notice] 74#74: exiting
2021/04/01 17:04:11 [notice] 77#77: gracefully shutting down
2021/04/01 17:04:11 [notice] 77#77: exiting
2021/04/01 17:04:11 [notice] 74#74: exit
2021/04/01 17:04:11 [notice] 79#79: exit
2021/04/01 17:04:11 [notice] 80#80: exit
2021/04/01 17:04:11 [notice] 76#76: gracefully shutting down
2021/04/01 17:04:11 [notice] 76#76: exiting
2021/04/01 17:04:11 [notice] 77#77: exit
2021/04/01 17:04:11 [notice] 78#78: gracefully shutting down
2021/04/01 17:04:11 [notice] 78#78: exiting
2021/04/01 17:04:11 [notice] 76#76: exit
2021/04/01 17:04:11 [notice] 78#78: exit
2021/04/01 17:04:11 [notice] 75#75: gracefully shutting down
2021/04/01 17:04:11 [notice] 75#75: exiting
2021/04/01 17:04:11 [notice] 81#81: gracefully shutting down
2021/04/01 17:04:11 [notice] 81#81: exiting
2021/04/01 17:04:11 [notice] 75#75: exit
2021/04/01 17:04:11 [notice] 81#81: exit
2021/04/01 17:04:11 [notice] 18#18: signal 17 (SIGCHLD) received from 75
2021/04/01 17:04:11 [notice] 18#18: worker process 75 exited with code 0
2021/04/01 17:04:11 [notice] 18#18: worker process 77 exited with code 0
2021/04/01 17:04:11 [notice] 18#18: worker process 80 exited with code 0
2021/04/01 17:04:11 [notice] 18#18: signal 29 (SIGIO) received
2021/04/01 17:04:11 [notice] 18#18: signal 17 (SIGCHLD) received from 77
2021/04/01 17:04:11 [notice] 18#18: worker process 74 exited with code 0
2021/04/01 17:04:11 [notice] 18#18: signal 29 (SIGIO) received
2021/04/01 17:04:11 [notice] 18#18: signal 17 (SIGCHLD) received from 78
2021/04/01 17:04:11 [notice] 18#18: worker process 76 exited with code 0
2021/04/01 17:04:11 [notice] 18#18: worker process 78 exited with code 0
2021/04/01 17:04:11 [notice] 18#18: worker process 79 exited with code 0
2021/04/01 17:04:11 [notice] 18#18: signal 29 (SIGIO) received
2021/04/01 17:04:11 [notice] 18#18: signal 17 (SIGCHLD) received from 81
2021/04/01 17:04:11 [notice] 18#18: worker process 81 exited with code 0
2021/04/01 17:04:11 [notice] 18#18: signal 29 (SIGIO) received
2021/04/01 17:04:18 [notice] 93#93: signal process started
2021/04/01 17:04:18 [notice] 18#18: signal 1 (SIGHUP) received from 93, reconfiguring
2021/04/01 17:04:18 [notice] 18#18: reconfiguring
2021/04/01 17:04:18 [notice] 18#18: using the "epoll" event method
2021/04/01 17:04:18 [notice] 18#18: start worker processes
2021/04/01 17:04:18 [notice] 18#18: start worker process 94
2021/04/01 17:04:18 [notice] 18#18: start worker process 95
2021/04/01 17:04:18 [notice] 18#18: start worker process 96
2021/04/01 17:04:18 [notice] 18#18: start worker process 97
2021/04/01 17:04:18 [notice] 18#18: start worker process 98
2021/04/01 17:04:18 [notice] 18#18: start worker process 99
2021/04/01 17:04:18 [notice] 18#18: start worker process 100
2021/04/01 17:04:18 [notice] 18#18: start worker process 101
2021/04/01 17:04:19 [notice] 84#84: gracefully shutting down
2021/04/01 17:04:19 [notice] 86#86: gracefully shutting down
2021/04/01 17:04:19 [notice] 86#86: exiting
2021/04/01 17:04:19 [notice] 85#85: gracefully shutting down
2021/04/01 17:04:19 [notice] 84#84: exiting
2021/04/01 17:04:19 [notice] 85#85: exiting
2021/04/01 17:04:19 [notice] 86#86: exit
2021/04/01 17:04:19 [notice] 84#84: exit
2021/04/01 17:04:19 [notice] 87#87: gracefully shutting down
2021/04/01 17:04:19 [notice] 90#90: gracefully shutting down
2021/04/01 17:04:19 [notice] 90#90: exiting
2021/04/01 17:04:19 [notice] 87#87: exiting
2021/04/01 17:04:19 [notice] 85#85: exit
2021/04/01 17:04:19 [notice] 88#88: gracefully shutting down
2021/04/01 17:04:19 [notice] 87#87: exit
2021/04/01 17:04:19 [notice] 90#90: exit
2021/04/01 17:04:19 [notice] 88#88: exiting
2021/04/01 17:04:19 [notice] 89#89: gracefully shutting down
2021/04/01 17:04:19 [notice] 89#89: exiting
2021/04/01 17:04:19 [notice] 88#88: exit
2021/04/01 17:04:19 [notice] 89#89: exit
2021/04/01 17:04:19 [notice] 91#91: gracefully shutting down
2021/04/01 17:04:19 [notice] 91#91: exiting
2021/04/01 17:04:19 [notice] 91#91: exit
2021/04/01 17:04:19 [notice] 18#18: signal 17 (SIGCHLD) received from 84
2021/04/01 17:04:19 [notice] 18#18: worker process 84 exited with code 0
2021/04/01 17:04:19 [notice] 18#18: worker process 87 exited with code 0
2021/04/01 17:04:19 [notice] 18#18: worker process 90 exited with code 0
2021/04/01 17:04:19 [notice] 18#18: signal 29 (SIGIO) received
2021/04/01 17:04:19 [notice] 18#18: signal 17 (SIGCHLD) received from 89
2021/04/01 17:04:19 [notice] 18#18: worker process 85 exited with code 0
2021/04/01 17:04:19 [notice] 18#18: worker process 86 exited with code 0
2021/04/01 17:04:19 [notice] 18#18: worker process 89 exited with code 0
2021/04/01 17:04:19 [notice] 18#18: worker process 91 exited with code 0
2021/04/01 17:04:19 [notice] 18#18: signal 29 (SIGIO) received
2021/04/01 17:04:19 [notice] 18#18: signal 17 (SIGCHLD) received from 91
2021/04/01 17:04:19 [notice] 18#18: signal 17 (SIGCHLD) received from 88
2021/04/01 17:04:19 [notice] 18#18: worker process 88 exited with code 0
2021/04/01 17:04:19 [notice] 18#18: signal 29 (SIGIO) received
.....
Test and verify
[root@node-nfs ~]# echo 192.168.174.132 demo.xadocker.cn >> /etc/hosts
[root@node-nfs ~]# curl demo.xadocker.cn
nginx1-859486d7bb-7hc5b 10.100.166.169
[root@node-nfs ~]# curl demo.xadocker.cn
nginx1-859486d7bb-z2sml 10.100.166.171
[root@node-nfs ~]# curl demo.xadocker.cn
nginx1-859486d7bb-jcdlr 10.100.166.184
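If you would rather not edit /etc/hosts on the client, the same round-robin test can be done by overriding the Host header against the node IP:
[root@node-nfs ~]# curl -H 'Host: demo.xadocker.cn' http://192.168.174.132/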
# Note: the backends wired into the ingress are the Service's Endpoints (the pod addresses), not the Service IP
[root@node1 ingress]# kubectl describe ingress ingress-demo
Name: ingress-demo
Namespace: default
Address:
Default backend: default-http-backend:80 (<error: endpoints "default-http-backend" not found>)
Rules:
Host Path Backends
---- ---- --------
demo.xadocker.cn
/ nginx1-service:80 (10.100.166.169:80,10.100.166.171:80,10.100.166.172:80 + 2 more...)
Annotations:
Events:
Type Reason Age From Message
---- ------ ---- ---- -------
Normal AddedOrUpdated 30m nginx-ingress-controller Configuration for default/ingress-demo was added or updated