Subsections of Deploy App

Deploy CCDS Server

Preliminary

  • MariaDB has been installed through argo-workflow; if not, check the link
  • Redis has been installed through argo-workflow; if not, check the link
  • MariaDB initialization has finished; if not, check the link
  • The nfs-external-nas storage class (NAS-backed) has been provisioned; if not, check the link
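
A quick sanity check of these prerequisites (the grep pattern and storage class name match the service names and storageClassName used later in this guide; adjust them if your releases are named differently):

kubectl -n application get pods | grep -E 'mariadb|redis'
kubectl get storageclass nfs-external-nas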

Steps

1. decode the MariaDB root password (substitute it for the password in DATABASE_URL in the Helm values below)

kubectl -n application get secret mariadb-credentials -o jsonpath='{.data.mariadb-root-password}' | base64 -d
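
To verify the decoded password, one option is to open a client session directly in the MariaDB pod (a sketch; app-mariadb-0 is an assumed pod name derived from the app-mariadb service referenced in DATABASE_URL below):

kubectl -n application exec -it app-mariadb-0 -- \
  mysql -uroot -p"$(kubectl -n application get secret mariadb-credentials -o jsonpath='{.data.mariadb-root-password}' | base64 -d)" -e 'SHOW DATABASES;'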

2. prepare combo-data-pvc.yaml

---
apiVersion: "v1"
kind: "PersistentVolumeClaim"
metadata:
  name: "ccds-data-pvc"
  namespace: "application"
spec:
  accessModes:
  - "ReadWriteMany"
  resources:
    requests:
      storage: "200Gi"
  storageClassName: "nfs-external-nas"
status:
  accessModes:
  - "ReadWriteMany"
  capacity:
    storage: "200Gi"
---
apiVersion: "v1"
kind: "PersistentVolumeClaim"
metadata:
  name: "csst-data-pvc"
  namespace: "application"
spec:
  accessModes:
  - "ReadWriteMany"
  resources:
    requests:
      storage: "200Gi"
  storageClassName: "nfs-external-nas"
status:
  accessModes:
  - "ReadWriteMany"
  capacity:
    storage: "200Gi"

3. prepare deploy-ccds-server.yaml

apiVersion: argoproj.io/v1alpha1
kind: Workflow
metadata:
  generateName: deploy-ccds-
spec:
  entrypoint: entry
  artifactRepositoryRef:
    configmap: artifact-repositories
    key: default-artifact-repository
  serviceAccountName: argo-workflow
  templates:
  - name: entry
    inputs:
      parameters:
      - name: argocd-server
        value: argo-cd-argocd-server.argocd:443
      - name: insecure-option
        value: --insecure
    dag:
      tasks:
      - name: apply
        template: apply
      - name: prepare-argocd-binary
        template: prepare-argocd-binary
        dependencies:
        - apply
      - name: sync
        dependencies:
        - prepare-argocd-binary
        template: sync
        arguments:
          artifacts:
          - name: argocd-binary
            from: "{{tasks.prepare-argocd-binary.outputs.artifacts.argocd-binary}}"
          parameters:
          - name: argocd-server
            value: "{{inputs.parameters.argocd-server}}"
          - name: insecure-option
            value: "{{inputs.parameters.insecure-option}}"
      - name: wait
        dependencies:
        - sync
        template: wait
        arguments:
          artifacts:
          - name: argocd-binary
            from: "{{tasks.prepare-argocd-binary.outputs.artifacts.argocd-binary}}"
          parameters:
          - name: argocd-server
            value: "{{inputs.parameters.argocd-server}}"
          - name: insecure-option
            value: "{{inputs.parameters.insecure-option}}"
  - name: apply
    resource:
      action: apply
      manifest: |
        apiVersion: argoproj.io/v1alpha1
        kind: Application
        metadata:
          name: ccds-server
          namespace: argocd
        spec:
          syncPolicy:
            syncOptions:
            - CreateNamespace=true
          project: default
          source:
            repoURL: https://charts.bitnami.com/bitnami
            chart: nginx
            targetRevision: 15.10.4
            helm:
              releaseName: ccds-server
              values: |
                image:
                  registry: cr.registry.res.cloud.wuxi-yqgcy.cn
                  repository: csst/ccds
                  tag: V1-argo-test
                  pullPolicy: IfNotPresent
                extraEnvVars:
                  - name: TZ
                    value: Asia/Shanghai
                  - name: FLASK_DEBUG
                    value: "0"
                  - name: FLASK_ENV
                    value: "production"
                  - name: DATABASE_URL
                    value: "mysql://root:IqzfDQfjkzfNhsCS@app-mariadb.application:3306/ccds?charset=utf8"
                  - name: REDIS_HOST
                    value: "app-redis-master.application"
                  - name: REDIS_PWD
                    value: "THY7BxnEIOeecarE"
                  - name: REDIS_PORT
                    value: "6379"
                  - name: CSST_DFS_API_MODE
                    value: "cluster"
                  - name: CSST_DFS_GATEWAY
                    value: "csst-gateway.csst:80"
                  - name: CSST_DFS_APP_ID
                    value: "test"
                  - name: CSST_DFS_APP_TOKEN
                    value: "test"
                containerSecurityContext:
                  enabled: false
                replicaCount: 1
                containerPorts:
                  http: 9000
                extraVolumes:
                  - name: csst-data-pvc
                    persistentVolumeClaim:
                      claimName: csst-data-pvc
                  - name: ccds-data-pvc
                    persistentVolumeClaim:
                      claimName: ccds-data-pvc
                extraVolumeMounts:
                  - mountPath: /csst-data
                    name: csst-data-pvc
                  - mountPath: /ccds-data
                    name: ccds-data-pvc
                service:
                  type: ClusterIP
                  ports:
                    http: 9000
                  targetPort:
                    http: 9000
          destination:
            server: https://kubernetes.default.svc
            namespace: application
  - name: prepare-argocd-binary
    inputs:
      artifacts:
      - name: argocd-binary
        path: /tmp/argocd
        mode: 755
        http:
          url: https://files.m.daocloud.io/github.com/argoproj/argo-cd/releases/download/v2.9.3/argocd-linux-amd64
    outputs:
      artifacts:
      - name: argocd-binary
        path: "{{inputs.artifacts.argocd-binary.path}}"
    container:
      image: m.daocloud.io/docker.io/library/fedora:39
      command:
      - sh
      - -c
      args:
      - |
        ls -l {{inputs.artifacts.argocd-binary.path}}
  - name: sync
    inputs:
      artifacts:
      - name: argocd-binary
        path: /usr/local/bin/argocd
      parameters:
      - name: argocd-server
      - name: insecure-option
        value: ""
    container:
      image: m.daocloud.io/docker.io/library/fedora:39
      env:
      - name: ARGOCD_USERNAME
        valueFrom:
          secretKeyRef:
            name: argocd-login-credentials
            key: username
      - name: ARGOCD_PASSWORD
        valueFrom:
          secretKeyRef:
            name: argocd-login-credentials
            key: password
      - name: WITH_PRUNE_OPTION
        value: --prune
      command:
      - sh
      - -c
      args:
      - |
        set -e
        export ARGOCD_SERVER={{inputs.parameters.argocd-server}}
        export INSECURE_OPTION={{inputs.parameters.insecure-option}}
        export ARGOCD_USERNAME=${ARGOCD_USERNAME:-admin}
        argocd login ${INSECURE_OPTION} --username ${ARGOCD_USERNAME} --password ${ARGOCD_PASSWORD} ${ARGOCD_SERVER}
        argocd app sync argocd/ccds-server ${WITH_PRUNE_OPTION} --timeout 300
  - name: wait
    inputs:
      artifacts:
      - name: argocd-binary
        path: /usr/local/bin/argocd
      parameters:
      - name: argocd-server
      - name: insecure-option
        value: ""
    container:
      image: m.daocloud.io/docker.io/library/fedora:39
      env:
      - name: ARGOCD_USERNAME
        valueFrom:
          secretKeyRef:
            name: argocd-login-credentials
            key: username
      - name: ARGOCD_PASSWORD
        valueFrom:
          secretKeyRef:
            name: argocd-login-credentials
            key: password
      command:
      - sh
      - -c
      args:
      - |
        set -e
        export ARGOCD_SERVER={{inputs.parameters.argocd-server}}
        export INSECURE_OPTION={{inputs.parameters.insecure-option}}
        export ARGOCD_USERNAME=${ARGOCD_USERNAME:-admin}
        argocd login ${INSECURE_OPTION} --username ${ARGOCD_USERNAME} --password ${ARGOCD_PASSWORD} ${ARGOCD_SERVER}
        argocd app wait argocd/ccds-server

4. create the PVC resources

kubectl -n application apply -f combo-data-pvc.yaml
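
Both claims should reach the Bound state before the workflow is submitted:

kubectl -n application get pvc ccds-data-pvc csst-data-pvc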

5. submit to the argo workflow client

argo -n business-workflows submit deploy-ccds-server.yaml
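
To follow the deployment (a sketch; the app.kubernetes.io/instance label is an assumption about how the Bitnami nginx chart labels its pods):

argo -n business-workflows watch @latest
kubectl -n argocd get applications.argoproj.io ccds-server
kubectl -n application get pods -l app.kubernetes.io/instance=ccds-server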

Deploy DFS Server

Preliminary

  • MariaDB has been installed through argo-workflow; if not, check the link
  • Postgresql has been installed through argo-workflow; if not, check the link
  • MariaDB initialization has finished; if not, check the link
  • Postgresql initialization has finished; if not, check the link

Steps

0. prepare csst.yaml

global:
  fitsFileRootDir: /opt/temp/csst/fits_file
  fileExternalPrefix: http://csst.astrolab.cn/file
  tempDir: /tmp
etcd:
  host: 0.0.0.0
  port: 2379

redisChannel0:
  host: app-redis-master.application
  port: 6379
  db_id: 0
  channel0: channel0
  passwd:
  level0_list: single-image-reduction:data0

csst_db:
  enabled: true
  host: 172.27.253.66
  port: 30173
  user: "postgres"
  passwd: "woQ8btfS44ei1Bbx"
  db: "csst"
  maxIdleConnection: 100
  maxOpenConnection: 130
  connMaxLifetime: 100

csst_db_seq:
  enabled: true
  host: 172.27.253.66
  port: 30173
  user: "postgres"
  passwd: "woQ8btfS44ei1Bbx"
  db: "csst"
  maxIdleConnection: 100
  maxOpenConnection: 130
  connMaxLifetime: 100

csst_ck:
  enabled: true
  url: tcp://app-clickhouse-service-external.application:30900?compress=true
  host: app-clickhouse-service-external
  clusters: app-clickhouse-service-external:30900
  port: 30900
  db: csst
  user: admin
  passwd: "YEkvhrhEaeZTf7E0"

gateway:
  enabled: true
  url: csst-gateway.application:31280

ephem_ck_db:
  enabled: true
  url: tcp://app-clickhouse-service-external.application:30900?compress=true
  host: app-clickhouse-service-external
  port: 30900
  db: ephem
  user: admin
  passwd: "YEkvhrhEaeZTf7E0"
  maxIdleConnection: 100
  maxOpenConnection: 130
  connMaxLifetime: 100

csst_doris_db:
  enabled: true
  host: app-mariadb
  port: 3306
  user: "root"
  passwd: "IqzfDQfjkzfNhsCS"
  db: "ccds"
  maxIdleConnection: 100
  maxOpenConnection: 130
  connMaxLifetime: 100

redis:
  enabled: true
  conn: app-redis-master.application:6379
  dbNum: 8
  password:
  timeout: 3000
  sentinel:
    master: csstMaster
    nodes: app-redis-master.application:6379

jwt:
  secretKey: W6VjDud2W1kMG3BicbMNlGgI4ZfcoHtMGLWr

auth_srv:
  name: net.cnlab.csst.srv.auth.
  address:
  port: 9030
zap:
  level: error
  development: true
  logFileDir:
  outputPaths: []
  maxSize: 50
  maxBackups: 200
  maxAge: 10

dfs_srv:
  name: net.cnlab.csst.srv.dfs-srv.
  address:
  port: 9100

ephem_srv:
  name: net.cnlab.csst.srv.ephem.
  address:
  port: 9060

ephem_rest:
  name: net.cnlab.csst.srv.ephem-rest.
  address:
  port: 9068

user_srv:
  name: net.cnlab.csst.srv.user.
  address:
  port: 9090

fits_srv:
  name: net.cnlab.csst.srv.fits.
  address:
  port: 9002
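
Before creating the secret in the next step, it can be worth confirming the file parses as valid YAML (assuming python3 with PyYAML is available where you run kubectl):

python3 -c "import yaml; yaml.safe_load(open('csst.yaml')); print('csst.yaml parses OK')"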

1. create the csst-credentials secret

kubectl -n application create secret generic csst-credentials --from-file=./csst.yaml
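
Confirm the secret was created with the csst.yaml key:

kubectl -n application get secret csst-credentials -o jsonpath='{.data.csst\.yaml}' | base64 -d | head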

2. [Optional] prepare csst-data-pvc.yaml

apiVersion: "v1"
kind: "PersistentVolumeClaim"
metadata:
  name: "csst-data-pvc"
  namespace: "application"
spec:
  accessModes:
  - "ReadWriteMany"
  resources:
    requests:
      storage: "200Gi"
  storageClassName: "nfs-external-nas"
status:
  accessModes:
  - "ReadWriteMany"
  capacity:
    storage: "200Gi"

3. prepare deploy-dfs-server.yaml

apiVersion: argoproj.io/v1alpha1
kind: Workflow
metadata:
  generateName: deploy-dfs-server-
spec:
  entrypoint: entry
  artifactRepositoryRef:
    configmap: artifact-repositories
    key: default-artifact-repository
  serviceAccountName: argo-workflow
  templates:
  - name: entry
    inputs:
      parameters:
      - name: argocd-server
        value: argo-cd-argocd-server.argocd:443
      - name: insecure-option
        value: --insecure
    dag:
      tasks:
      - name: apply
        template: apply
      - name: prepare-argocd-binary
        template: prepare-argocd-binary
        dependencies:
        - apply
      - name: sync
        dependencies:
        - prepare-argocd-binary
        template: sync
        arguments:
          artifacts:
          - name: argocd-binary
            from: "{{tasks.prepare-argocd-binary.outputs.artifacts.argocd-binary}}"
          parameters:
          - name: argocd-server
            value: "{{inputs.parameters.argocd-server}}"
          - name: insecure-option
            value: "{{inputs.parameters.insecure-option}}"
      - name: wait
        dependencies:
        - sync
        template: wait
        arguments:
          artifacts:
          - name: argocd-binary
            from: "{{tasks.prepare-argocd-binary.outputs.artifacts.argocd-binary}}"
          parameters:
          - name: argocd-server
            value: "{{inputs.parameters.argocd-server}}"
          - name: insecure-option
            value: "{{inputs.parameters.insecure-option}}"
  - name: apply
    resource:
      action: apply
      manifest: |
        apiVersion: argoproj.io/v1alpha1
        kind: Application
        metadata:
          name: dfs-server
          namespace: argocd
        spec:
          syncPolicy:
            syncOptions:
              - CreateNamespace=true
          project: default
          source:
            repoURL: https://charts.bitnami.com/bitnami
            chart: nginx
            targetRevision: 15.10.4
            helm:
              releaseName: dfs-server
              values: |
                image:
                  registry: cr.registry.res.cloud.wuxi-yqgcy.cn
                  repository: mirror/dfs-server
                  tag: v240306-r1
                  pullPolicy: IfNotPresent
                extraEnvVars:
                  - name: ZONEINFO
                    value: /opt/zoneinfo.zip
                  - name: TZ
                    value: Asia/Shanghai
                  - name: CONFIG_FILE_PATH
                    value: /app/csst.yaml
                  - name: PYTHONPATH
                    value: /work/csst-py/:/work/csst-py/dfs-srv:/work/packages:/work/csst-dfs-proto-py:/work/csst-dfs-commons:/work/csst-dfs-base
                containerSecurityContext:
                  enabled: false
                containerPorts:
                  http: 9100
                extraVolumes:
                  - name: csst-data-pvc
                    persistentVolumeClaim:
                      claimName: csst-data-pvc
                  - name: dfs-csst-config
                    secret:
                      secretName: csst-credentials
                extraVolumeMounts:
                  - mountPath: /share/dfs
                    name: csst-data-pvc
                  - mountPath: /app
                    name: dfs-csst-config
                service:
                  type: ClusterIP
                  ports:
                    http: 9100
                  targetPort:
                    http: http
                ingress:
                  enabled: false
          destination:
            server: https://kubernetes.default.svc
            namespace: application
  - name: prepare-argocd-binary
    inputs:
      artifacts:
      - name: argocd-binary
        path: /tmp/argocd
        mode: 755
        http:
          url: https://files.m.daocloud.io/github.com/argoproj/argo-cd/releases/download/v2.9.3/argocd-linux-amd64
    outputs:
      artifacts:
      - name: argocd-binary
        path: "{{inputs.artifacts.argocd-binary.path}}"
    container:
      image: m.daocloud.io/docker.io/library/fedora:39
      command:
      - sh
      - -c
      args:
      - |
        ls -l {{inputs.artifacts.argocd-binary.path}}
  - name: sync
    inputs:
      artifacts:
      - name: argocd-binary
        path: /usr/local/bin/argocd
      parameters:
      - name: argocd-server
      - name: insecure-option
        value: ""
    container:
      image: m.daocloud.io/docker.io/library/fedora:39
      env:
      - name: ARGOCD_USERNAME
        valueFrom:
          secretKeyRef:
            name: argocd-login-credentials
            key: username
      - name: ARGOCD_PASSWORD
        valueFrom:
          secretKeyRef:
            name: argocd-login-credentials
            key: password
      - name: WITH_PRUNE_OPTION
        value: --prune
      command:
      - sh
      - -c
      args:
      - |
        set -e
        export ARGOCD_SERVER={{inputs.parameters.argocd-server}}
        export INSECURE_OPTION={{inputs.parameters.insecure-option}}
        export ARGOCD_USERNAME=${ARGOCD_USERNAME:-admin}
        argocd login ${INSECURE_OPTION} --username ${ARGOCD_USERNAME} --password ${ARGOCD_PASSWORD} ${ARGOCD_SERVER}
        argocd app sync argocd/dfs-server ${WITH_PRUNE_OPTION} --timeout 300
  - name: wait
    inputs:
      artifacts:
      - name: argocd-binary
        path: /usr/local/bin/argocd
      parameters:
      - name: argocd-server
      - name: insecure-option
        value: ""
    container:
      image: m.daocloud.io/docker.io/library/fedora:39
      env:
      - name: ARGOCD_USERNAME
        valueFrom:
          secretKeyRef:
            name: argocd-login-credentials
            key: username
      - name: ARGOCD_PASSWORD
        valueFrom:
          secretKeyRef:
            name: argocd-login-credentials
            key: password
      command:
      - sh
      - -c
      args:
      - |
        set -e
        export ARGOCD_SERVER={{inputs.parameters.argocd-server}}
        export INSECURE_OPTION={{inputs.parameters.insecure-option}}
        export ARGOCD_USERNAME=${ARGOCD_USERNAME:-admin}
        argocd login ${INSECURE_OPTION} --username ${ARGOCD_USERNAME} --password ${ARGOCD_PASSWORD} ${ARGOCD_SERVER}
        argocd app wait argocd/dfs-server

4. [Optional] create the PVC resource

kubectl -n application apply -f csst-data-pvc.yaml
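
The claim should reach the Bound state before the workflow is submitted:

kubectl -n application get pvc csst-data-pvc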

5. submit to the argo workflow client

argo -n business-workflows submit deploy-dfs-server.yaml
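
To follow the deployment and check the resulting service (a sketch; dfs-server-nginx follows the <release>-nginx naming that the gateway configuration below also relies on, and the instance label is an assumption about the chart's labels):

argo -n business-workflows watch @latest
kubectl -n application get svc dfs-server-nginx
kubectl -n application get pods -l app.kubernetes.io/instance=dfs-server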

Deploy DFS Ephem

Preliminary

  • MariaDB has been installed through argo-workflow; if not, check the link
  • Postgresql has been installed through argo-workflow; if not, check the link
  • MariaDB initialization has finished; if not, check the link
  • Postgresql initialization has finished; if not, check the link

Steps

1. [Optional] create the csst-credentials secret

kubectl -n application create secret generic csst-credentials --from-file=./csst.yaml
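
If the secret was already created during the DFS server deployment, this step can be skipped; a quick check:

kubectl -n application get secret csst-credentials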

2. prepare deploy-dfs-ephem.yaml

apiVersion: argoproj.io/v1alpha1
kind: Workflow
metadata:
  generateName: deploy-dfs-ephem-
spec:
  entrypoint: entry
  artifactRepositoryRef:
    configmap: artifact-repositories
    key: default-artifact-repository
  serviceAccountName: argo-workflow
  templates:
  - name: entry
    inputs:
      parameters:
      - name: argocd-server
        value: argo-cd-argocd-server.argocd:443
      - name: insecure-option
        value: --insecure
    dag:
      tasks:
      - name: apply
        template: apply
      - name: prepare-argocd-binary
        template: prepare-argocd-binary
        dependencies:
        - apply
      - name: sync
        dependencies:
        - prepare-argocd-binary
        template: sync
        arguments:
          artifacts:
          - name: argocd-binary
            from: "{{tasks.prepare-argocd-binary.outputs.artifacts.argocd-binary}}"
          parameters:
          - name: argocd-server
            value: "{{inputs.parameters.argocd-server}}"
          - name: insecure-option
            value: "{{inputs.parameters.insecure-option}}"
      - name: wait
        dependencies:
        - sync
        template: wait
        arguments:
          artifacts:
          - name: argocd-binary
            from: "{{tasks.prepare-argocd-binary.outputs.artifacts.argocd-binary}}"
          parameters:
          - name: argocd-server
            value: "{{inputs.parameters.argocd-server}}"
          - name: insecure-option
            value: "{{inputs.parameters.insecure-option}}"
  - name: apply
    resource:
      action: apply
      manifest: |
        apiVersion: argoproj.io/v1alpha1
        kind: Application
        metadata:
          name: dfs-ephem
          namespace: argocd
        spec:
          syncPolicy:
            syncOptions:
              - CreateNamespace=true
          project: default
          source:
            repoURL: https://charts.bitnami.com/bitnami
            chart: nginx
            targetRevision: 15.10.4
            helm:
              releaseName: dfs-ephem
              values: |
                image:
                  registry: cr.registry.res.cloud.wuxi-yqgcy.cn
                  repository: mirror/dfs-server
                  tag: v240306-r1
                  pullPolicy: IfNotPresent
                command:
                  - python
                args:
                  - /work/csst-py/ephem-srv/csst_ephem_srv/server.py
                extraEnvVars:
                  - name: ZONEINFO
                    value: /opt/zoneinfo.zip
                  - name: TZ
                    value: Asia/Shanghai
                  - name: CONFIG_FILE_PATH
                    value: /app/csst.yaml
                  - name: CONFIG_SERVER
                    value: "cdfs-config.csst:9610"
                  - name: PYTHONPATH
                    value: /work/csst-py/ephem-srv:/work/csst-py/:/work/csst-py/dfs-srv:/work/packages:/work/csst-dfs-proto-py:/work/csst-dfs-commons:/work/csst-dfs-base
                containerSecurityContext:
                  enabled: false
                replicaCount: 1
                containerPorts:
                  http: 9060
                service:
                  type: ClusterIP
                  ports:
                    http: 9060
                  targetPort:
                    http: http
                ingress:
                  enabled: false
                extraVolumes:
                  - name: dfs-csst-config
                    secret:
                      secretName: csst-credentials
                extraVolumeMounts:
                  - mountPath: /app
                    name: dfs-csst-config
          destination:
            server: https://kubernetes.default.svc
            namespace: application
  - name: prepare-argocd-binary
    inputs:
      artifacts:
      - name: argocd-binary
        path: /tmp/argocd
        mode: 755
        http:
          url: https://files.m.daocloud.io/github.com/argoproj/argo-cd/releases/download/v2.9.3/argocd-linux-amd64
    outputs:
      artifacts:
      - name: argocd-binary
        path: "{{inputs.artifacts.argocd-binary.path}}"
    container:
      image: m.daocloud.io/docker.io/library/fedora:39
      command:
      - sh
      - -c
      args:
      - |
        ls -l {{inputs.artifacts.argocd-binary.path}}
  - name: sync
    inputs:
      artifacts:
      - name: argocd-binary
        path: /usr/local/bin/argocd
      parameters:
      - name: argocd-server
      - name: insecure-option
        value: ""
    container:
      image: m.daocloud.io/docker.io/library/fedora:39
      env:
      - name: ARGOCD_USERNAME
        valueFrom:
          secretKeyRef:
            name: argocd-login-credentials
            key: username
      - name: ARGOCD_PASSWORD
        valueFrom:
          secretKeyRef:
            name: argocd-login-credentials
            key: password
      - name: WITH_PRUNE_OPTION
        value: --prune
      command:
      - sh
      - -c
      args:
      - |
        set -e
        export ARGOCD_SERVER={{inputs.parameters.argocd-server}}
        export INSECURE_OPTION={{inputs.parameters.insecure-option}}
        export ARGOCD_USERNAME=${ARGOCD_USERNAME:-admin}
        argocd login ${INSECURE_OPTION} --username ${ARGOCD_USERNAME} --password ${ARGOCD_PASSWORD} ${ARGOCD_SERVER}
        argocd app sync argocd/dfs-ephem ${WITH_PRUNE_OPTION} --timeout 300
  - name: wait
    inputs:
      artifacts:
      - name: argocd-binary
        path: /usr/local/bin/argocd
      parameters:
      - name: argocd-server
      - name: insecure-option
        value: ""
    container:
      image: m.daocloud.io/docker.io/library/fedora:39
      env:
      - name: ARGOCD_USERNAME
        valueFrom:
          secretKeyRef:
            name: argocd-login-credentials
            key: username
      - name: ARGOCD_PASSWORD
        valueFrom:
          secretKeyRef:
            name: argocd-login-credentials
            key: password
      command:
      - sh
      - -c
      args:
      - |
        set -e
        export ARGOCD_SERVER={{inputs.parameters.argocd-server}}
        export INSECURE_OPTION={{inputs.parameters.insecure-option}}
        export ARGOCD_USERNAME=${ARGOCD_USERNAME:-admin}
        argocd login ${INSECURE_OPTION} --username ${ARGOCD_USERNAME} --password ${ARGOCD_PASSWORD} ${ARGOCD_SERVER}
        argocd app wait argocd/dfs-ephem

3. submit to the argo workflow client

argo -n business-workflows submit deploy-dfs-ephem.yaml
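
To follow the deployment (a sketch; the instance label is an assumption about the Bitnami nginx chart's labels):

argo -n business-workflows watch @latest
kubectl -n application get pods -l app.kubernetes.io/instance=dfs-ephem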

Deploy CSST Gateway

Preliminary

  • The DFS server has been installed through argo-workflow; if not, check the link
  • The DFS ephem service has been installed through argo-workflow; if not, check the link
Warning

If the DFS server name, DFS ephem name, or namespace does not match your deployment, you may need to adjust the configuration below accordingly.
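
The gateway configuration proxies to the dfs-server-nginx and dfs-ephem-nginx services on ports 9100 and 9060, so verify both services exist first:

kubectl -n application get svc dfs-server-nginx dfs-ephem-nginx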

Steps

1. prepare csst-gateway.configmap.yaml

worker_processes auto;

events {
  worker_connections 1024;
}

http {
  log_format  main  '$remote_addr - $remote_user [$time_local] "$request" $status $body_bytes_sent "$http_referer" "$http_user_agent" "$http_x_forwarded_for"';

  default_type  application/octet-stream;

  sendfile              on;
  tcp_nopush            on;
  tcp_nodelay           on;

  client_max_body_size  10000m;
  types_hash_max_size   2048;
  underscores_in_headers on;
  reset_timedout_connection on; 

  keepalive_timeout     960;
  client_header_timeout 960;
  client_body_timeout   960; 
  proxy_connect_timeout 960;
  proxy_read_timeout 960;
  proxy_send_timeout 960;
  send_timeout 960;

  upstream grpc_dfs {
    server dfs-server-nginx.application:9100 weight=1;
  }
  upstream grpc_ephem {
    server dfs-ephem-nginx.application:9060 weight=1;
  }

  server {
    listen 80 http2;
    location ^~ /dfs. {
      grpc_pass_header  Host;
      grpc_pass_header  X-Real-IP;
      grpc_set_header Host $host;
      grpc_set_header X-Real-IP $remote_addr;
      grpc_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
      grpc_socket_keepalive on;
      grpc_read_timeout 960;
      grpc_send_timeout 960;
      proxy_read_timeout 960;
      proxy_send_timeout 960;
      grpc_pass grpc://grpc_dfs;
    }
    location ^~ /dfs.ephem. {
      grpc_pass_header  Host;
      grpc_pass_header  X-Real-IP;
      grpc_set_header Host $host;
      grpc_set_header X-Real-IP $remote_addr;
      grpc_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
      grpc_socket_keepalive on;
      grpc_read_timeout 960;
      grpc_send_timeout 960;
      proxy_read_timeout 960;
      proxy_send_timeout 960;
      grpc_pass grpc://grpc_ephem;
    }
  }

  server {
    listen 81;
    location /search/v2 {
      proxy_pass        http://0.0.0.0:9068/search;
      proxy_pass_request_headers      on;
      proxy_set_header   Host $host;
      proxy_set_header   X-Real-IP $remote_addr;
      proxy_set_header   X-Forwarded-For $proxy_add_x_forwarded_for;
      proxy_set_header   X-Forwarded-Host $server_name;
    }
    location / {
      root /share/dfs;
      autoindex on;
    }
  }
}

2. [Optional] prepare csst-data-pvc.yaml

apiVersion: "v1"
kind: "PersistentVolumeClaim"
metadata:
  name: "csst-data-pvc"
  namespace: "application"
spec:
  accessModes:
  - "ReadWriteMany"
  resources:
    requests:
      storage: "200Gi"
  storageClassName: "nfs-external-nas"
status:
  accessModes:
  - "ReadWriteMany"
  capacity:
    storage: "200Gi"

3. prepare deploy-csst-gateway.yaml

---
apiVersion: v1
kind: Service
metadata:
  labels:
    app.kubernetes.io/name: csst-gateway
  name: csst-gateway
  namespace: application
spec:
  ports:
    - name: http
      port: 80
      nodePort: 31280
      protocol: TCP
      targetPort: 80
    - name: search
      port: 81
      nodePort: 31281
      targetPort: 81
  selector:
    app.kubernetes.io/name: csst-gateway
  type: NodePort
---
apiVersion: apps/v1
kind: Deployment
metadata:
  labels:
    app.kubernetes.io/name: csst-gateway
  name: csst-gateway
  namespace: application
spec:
  replicas: 1
  selector:
    matchLabels:
      app.kubernetes.io/name: csst-gateway
  template:
    metadata:
      labels:
        app.kubernetes.io/name: csst-gateway
    spec:
      containers:
        - env:
            - name: TZ
              value: Asia/Shanghai
          image: docker.io/library/nginx:1.19.9-alpine
          imagePullPolicy: IfNotPresent
          name: csst-gateway
          ports:
            - containerPort: 80
              name: http
            - containerPort: 81
              name: search
          volumeMounts:
            - mountPath: /etc/nginx
              name: csst-gateway-config
            - mountPath: /share/dfs
              name: csst-data-pvc
      volumes:
        - name: csst-gateway-config
          configMap:
            name: csst-gateway-configmap
            items:
              - key: csst-gateway.configmap.yaml
                path: nginx.conf
        - name: csst-data-pvc
          persistentVolumeClaim:
            claimName: csst-data-pvc
      restartPolicy: Always
  

4. create a ConfigMap from csst-gateway.configmap.yaml

kubectl -n application create configmap csst-gateway-configmap --from-file=csst-gateway.configmap.yaml -o yaml --dry-run=client | kubectl -n application apply -f -
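
Confirm the ConfigMap holds the nginx configuration under the csst-gateway.configmap.yaml key:

kubectl -n application describe configmap csst-gateway-configmap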

5. [Optional] create the PVC resource

kubectl -n application apply -f csst-data-pvc.yaml

6. apply to k8s

kubectl -n application apply -f deploy-csst-gateway.yaml
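
A few checks once everything is applied (a sketch; replace <node-ip> with the address of any cluster node, and note that nginx -t will only pass in-cluster, where the DFS service names resolve):

kubectl -n application get pods,svc -l app.kubernetes.io/name=csst-gateway
kubectl -n application exec deploy/csst-gateway -- nginx -t
curl http://<node-ip>:31281/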