#!/bin/bash

# Deployment helper for the evi-videoanalytics-eltexfnrs stack.
# Usage: ./script.sh [VAR=value ...]
#
# SECURITY NOTE: the eval below executes text derived from the command
# line.  The grep filter only lets KEY=VALUE pairs through, but values
# are still expanded by the shell — never feed untrusted input here.
eval "$(echo "$@" | grep -o "[^= ]\+ *= *[^ =]\+" | sed 's/ *= */=/')"

# Default settings; each can be overridden via a KEY=VALUE argument
# parsed above.  ${VAR=default} assigns the default when VAR is unset.
: "${EFNRS_DB_HOST=evi-core-dev}"
: "${EFNRS_DB_NAME=core}"
: "${EFNRS_DB_USER=core}"
: "${EFNRS_DB_PASS=core}"
: "${EFNRS_AUTHORISATION_ENABLED=0}"
# BUG FIX: the original used ${ELTEXFNRS_VERSION-1.2.0-648}, which only
# expands a default without storing it; "=" actually assigns the value.
: "${ELTEXFNRS_VERSION=1.2.0-648}"

#: ${EFNRS_SCUD_HOST="evi-core-dev"}
#: ${EFNRS_SCUD_PORT=8000}

#EFNRS_LOG_LEVEL_FILE=debug2
EFNRS_LOG_LEVEL_CONSOLE=debug2

# List Repositories
# curl -s -X GET -u user:000000 https://mcpe-video-group.eltex.loc:20002/v2/_catalog | jq '.repositories[]'

# evi-videoanalytics-eltexfnrs
# tensorrt
# tritonserver

# GetLatestTag <repository> <prefix>
# Queries the private Docker registry for all tags of <repository>,
# drops "latest", keeps tags starting with <prefix>, and prints the
# highest one by version sort (prints an empty line if nothing matches).
# NOTE(review): registry credentials are hardcoded — consider moving
# them to environment variables or a docker credential helper.
GetLatestTag(){
  local repo=$1 prefix=$2 res
  # jq -r emits raw strings, replacing the original `jq | tr -d \"` pair.
  res=$(curl -s -X GET -u user:000000 "https://mcpe-video-group.eltex.loc:20002/v2/${repo}/tags/list" \
        | jq -r '.tags[]' | grep -v latest | grep "^${prefix}" | sort -V | tail -1)
  echo "$res"
}

# Resolve image tags.  GetLatestTag needs network access to the private
# registry; if it returns nothing, the ${VAR-default} fallbacks inside
# the compose heredoc below still provide working tags.
ELTEXFNRS_VERSION=$(GetLatestTag "evi-videoanalytics-eltexfnrs" "1.2.0-648")
#ELTEXFNRS_VERSION="latest"

# BUG FIX: the original assigned TENSORRT_VERSION here although the
# adjacent comment (and the compose service) refer to the Triton server
# image.  The value matches the compose default, so behavior is unchanged.
#TRITON_VERSION=$(GetLatestTag "tritonserver" "")
TRITON_VERSION="24.05-py3"

#TENSORRT_VERSION=$(GetLatestTag "tensorrt" "")
TENSORRT_VERSION="24.05-py3"

echo "$ELTEXFNRS_VERSION"

# Build the docker-compose definition in memory.  The heredoc delimiter
# is unquoted, so every ${VAR-default} below is expanded NOW, using the
# values resolved above; the finished document is later piped into
# `docker compose -f -`.  Do not add comments inside the heredoc — its
# body is program data, not script source.
# NOTE(review): `read -d ''` returns non-zero at EOF; harmless here
# because the script does not use `set -e`.
read -r -d '' COMPOSE <<EOF
services:
  db:
    image: "${DOCKER_REPO_FOR_POSTGRES-nexus.eltex.loc:9010}/postgres:${POSTGRES_VERSION-14.9}"
    profiles: [inside_db]
    hostname: db
    environment:
      POSTGRES_DB: "${POSTGRES_DB-db_efnrs}"
      POSTGRES_USER: "${POSTGRES_USER-user_efnrs}"
      POSTGRES_PASSWORD: "${POSTGRES_PASSWORD-userefnrs}"
    depends_on:
      eltexfnrs:
        condition: service_started
    volumes:
      - postgres_data_vol:/var/lib/postgresql/data
    restart: always
    networks:
      - eltexfnrs-network
    privileged: true
########################################################################################################
  triton:
    image: "${DOCKER_REPO_FOR_TRITON-nexus.eltex.loc:9010}/tritonserver:${TRITON_VERSION-24.05-py3}"
    hostname: triton
    depends_on:
      tensorrt:
        condition: service_completed_successfully
    deploy:
      resources:
        reservations:
          devices:
            - driver: nvidia
              count: all
              capabilities: [gpu]
    ports:
      - "${TRITON_EXTERNAL_PORT-10000}:${TRITON_INTERNAL_PORT-8000}"
    volumes:
      - models_ready_vol:/models
    restart: always
    command: bash -c "tritonserver --model-repository=/models --load-model=* --model-control-mode=explicit"
    networks:
      - eltexfnrs-network
    privileged: true
########################################################################################################  
  eltexfnrs:
    image: "${DOCKER_REPO_FOR_ELTEXFNRS-mcpe-video-group.eltex.loc:20002}/evi-videoanalytics-eltexfnrs:${ELTEXFNRS_VERSION-latest}"
    hostname: eltexfnrs
    environment:
      EFNRS_API_HOST:                     "${EFNRS_API_HOST-0.0.0.0}"
      EFNRS_API_PORT_HTTP:                "${EFNRS_API_PORT_HTTP-9020}"
      
      EFNRS_API_PORT_JRPC:                "${EFNRS_API_PORT_JRPC-9000}"
      
      EFNRS_AUTHORISATION_ENABLED:        "${EFNRS_AUTHORISATION_ENABLED-1}"
      EFNRS_API_THREADS:                  "${EFNRS_API_THREADS-0}"
      EFNRS_WATCH_VIDEO_PATH:             "${EFNRS_WATCH_VIDEO_PATH-./watch_video.html}"
      
      EFNRS_SCUD_HOST:                    "${EFNRS_SCUD_HOST-0.0.0.0}"
      EFNRS_SCUD_PORT:                    "${EFNRS_SCUD_PORT-9040}"
      
      EFNRS_WS_HOST:                      "${EFNRS_WS_HOST-0.0.0.0}"
      EFNRS_WS_PORT:                      "${EFNRS_WS_PORT-9010}"
      
      EFNRS_TRITON_HOST:                  "${EFNRS_TRITON_HOST-triton}"
      EFNRS_TRITON_PORT:                  "${EFNRS_TRITON_PORT-8000}"
      EFNRS_TRITON_SIZE_POOL:             "${EFNRS_TRITON_SIZE_POOL-10}"
      
      EFNRS_DB_HOST:                      "${EFNRS_DB_HOST-db}"
      EFNRS_DB_PORT:                      "${EFNRS_DB_PORT-5432}"
      EFNRS_DB_NAME:                      "${EFNRS_DB_NAME-db_efnrs}"
      EFNRS_DB_USER:                      "${EFNRS_DB_USER-user_efnrs}"
      EFNRS_DB_PASS:                      "${EFNRS_DB_PASS-userefnrs}"
      EFNRS_DB_POOL_SIZE:                 "${EFNRS_DB_POOL_SIZE-8}"
      
      EFNRS_EVENTS_MAX_SIZE:              "${EFNRS_EVENTS_MAX_SIZE-0}"
      EFNRS_EVENTS_MIN_SIZE:              "${EFNRS_EVENTS_MIN_SIZE-0}"
      EFNRS_EVENTS_DAY:                   "${EFNRS_EVENTS_DAY-0}"
      EFNRS_EVENTS_REQUESTS_INTERVAL_S:   "${EFNRS_EVENTS_REQUESTS_INTERVAL_S-20}"
      
      EFNRS_START_INTEL_GPU:              "${EFNRS_START_INTEL_GPU-90}"
      EFNRS_START_NVIDIA_GPU:             "${EFNRS_START_NVIDIA_GPU-90}"
      EFNRS_START_NVIDIA_MEM:             "${EFNRS_START_NVIDIA_MEM-90}"
      
      EFNRS_INTERRUPT_INTEL_GPU:          "${EFNRS_INTERRUPT_INTEL_GPU-95}"
      EFNRS_INTERRUPT_NVIDIA_GPU:         "${EFNRS_INTERRUPT_NVIDIA_GPU-95}"
      EFNRS_INTERRUPT_NVIDIA_MEM:         "${EFNRS_INTERRUPT_NVIDIA_MEM-95}"
      
      EFNRS_LOG_LEVEL_FILE:               "${EFNRS_LOG_LEVEL_FILE-info}"
      EFNRS_LOG_LEVEL_CONSOLE:            "${EFNRS_LOG_LEVEL_CONSOLE-info}"
      EFNRS_LOG_PATH:                     "${EFNRS_LOG_PATH-./efnrs.log}"
      
      EFNRS_THREADS:                      "${EFNRS_THREADS-0}"
      EFNRS_MASTER:                       "${EFNRS_MASTER-1}"
    ports:
      - "${EFNRS_EXTERNAL_API_PORT_HTTP-9020}:${EFNRS_INTERNAL_API_PORT_HTTP-9020}"
      - "${EFNRS_EXTERNAL_API_PORT_JRPC-9000}:${EFNRS_INTERNAL_API_PORT_JRPC-9000}"
      - "${EFNRS_EXTERNAL_WS_PORT-9010}:${EFNRS_INTERNAL_WS_PORT-9010}"
      - "${EFNRS_EXTERNAL_SCUD_PORT-9040}:${EFNRS_INTERNAL_SCUD_PORT-9040}"
    deploy:
      resources:
        reservations:
          devices:
            - driver: nvidia
              count: all
              capabilities: [gpu, utility, video, compute]
    restart: always
    volumes:
      - models_sources_vol:/opt/evi-videoanalytics-eltexfnrs-docker/models_sources
      - models_ready_vol:/opt/evi-videoanalytics-eltexfnrs-docker/models
      - faces_vol:/opt/evi-videoanalytics-eltexfnrs-docker/faces
      - events_vol:/opt/evi-videoanalytics-eltexfnrs-docker/events
      - /home/$USER/eltexfnrs-logs:/opt/evi-videoanalytics-eltexfnrs-docker/log
    networks:
      - eltexfnrs-network
    privileged: true
########################################################################################################
  tensorrt:
    image: "${DOCKER_REPO_FOR_TENSORRT-nexus.eltex.loc:9010}/tensorrt:${TENSORRT_VERSION-24.05-py3}"
    hostname: tensorrt
    command: >
        bash -c "
        if [ -e /models/plans/car_plate_detector/model.plan ] && [ -e /models/plans/face_detector/model.plan ] && [ -e /models/plans/face_encoder/model.plan ] && [ -e /models/plans/plate_recognizer/model.plan ]; then
          echo -e \"Собранные модели находятся в папке приложения: /models/plans/ \" &&
          if [ -e /models-ready/car_plate_detector/1/model.plan ] && [ -e /models-ready/face_detector/1/model.plan ] && [ -e /models-ready/face_encoder/1/model.plan ] && [ -e /models-ready/plate_recognizer/1/model.plan ]; then
            echo -e \"Собранные модели находятся в папке приложения: /models-ready/ \" &&
            exit 0
          else
            echo -e \"Перенос моделей в папку: /models-ready/plans/ \" && 
            mkdir -p /models-ready/car_plate_detector/1 &&
            mkdir -p /models-ready/face_detector/1 &&
            mkdir -p /models-ready/face_encoder/1 &&
            mkdir -p /models-ready/plate_recognizer/1 &&
            cp /models/plans/car_plate_detector/model.plan /models-ready/car_plate_detector/1/model.plan &&
            cp /models/plans/face_detector/model.plan /models-ready/face_detector/1/model.plan &&
            cp /models/plans/face_encoder/model.plan /models-ready/face_encoder/1/model.plan &&
            cp /models/plans/plate_recognizer/model.plan /models-ready/plate_recognizer/1/model.plan
            exit 0
          fi
        else
          echo -e \"Создание моделей в папке: /models/plans/ \" && 
          cd /models/ &&
          mkdir -p plans && 
          cd plans/ && 
          mkdir -p car_plate_detector && 
          mkdir -p face_detector && 
          mkdir -p face_encoder && 
          mkdir -p plate_recognizer &&
          cd .. && 
          trtexec --onnx=/models/face_detector/face_detector.onnx --saveEngine=/models/plans/face_detector/model.plan --shapes=input.1:1x3x320x320 &&
          trtexec --onnx=/models/face_encoder/face_encoder.onnx --saveEngine=/models/plans/face_encoder/model.plan --optShapes=input.1:16x3x112x112 --minShapes=input.1:1x3x112x112 --maxShapes=input.1:32x3x112x112 &&
          trtexec --onnx=/models/plate_recognizer/plate_recognizer.onnx --saveEngine=/models/plans/plate_recognizer/model.plan --optShapes=x:5x3x48x320 --minShapes=x:1x3x48x320 --maxShapes=x:100x3x48x320 &&
          trtexec --onnx=/models/car_plate_detector/car_plate_detector.onnx --saveEngine=/models/plans/car_plate_detector/model.plan &&
          if [ -e /models-ready/car_plate_detector/1/model.plan ] && [ -e /models-ready/plate_recognizer/1/model.plan ] && [ -e /models-ready/face_encoder/1/model.plan ] && [ -e /models-ready/face_detector/1/model.plan ]; then
            echo -e \"Собранные модели находятся в папке приложения: /models-ready/ \" &&
            exit 0
          else
            echo -e \"Перенос моделей в папку: /models-ready/plans/ \" && 
            mkdir -p /models-ready/car_plate_detector/1 &&
            mkdir -p /models-ready/plate_recognizer/1 &&
            mkdir -p /models-ready/face_encoder/1 &&
            mkdir -p /models-ready/face_detector/1 &&
            cp /models/plans/car_plate_detector/model.plan /models-ready/car_plate_detector/1/model.plan &&
            cp /models/plans/plate_recognizer/model.plan /models-ready/plate_recognizer/1/model.plan &&
            cp /models/plans/face_encoder/model.plan /models-ready/face_encoder/1/model.plan &&
            cp /models/plans/face_detector/model.plan /models-ready/face_detector/1/model.plan
            exit 0
          fi
        fi 
        "
    deploy:
      resources:
        reservations:
          devices:
            - driver: nvidia
              count: all
              capabilities: [gpu]
    volumes:
      - models_sources_vol:/models
      - models_ready_vol:/models-ready
    restart: on-failure
    depends_on:
      - eltexfnrs
    networks:
      - eltexfnrs-network
    privileged: true
########################################################################################################      
volumes:
    models_sources_vol:
        external: false
    models_ready_vol:
        external: false
    faces_vol:
        external: false
    events_vol:
        external: false
    postgres_data_vol:
        external: false
########################################################################################################
networks:
  eltexfnrs-network:
    external: false
EOF

# Log in to the private registry.  --password-stdin keeps the password
# out of `ps` output, though it is still hardcoded in this script.
echo 000000 | docker login -u user --password-stdin https://mcpe-video-group.eltex.loc:20002

# Snapshot the images referenced by the compose project BEFORE tearing
# anything down, so they can still be identified for removal below.
img=$(echo "$COMPOSE" | docker compose -f - images)

# Ask what to clean up; on the 3-second timeout ANS stays empty and is
# defaulted to 0 (no cleanup), avoiding a [[ -ge ]] comparison error.
read -t 3 -p "Act: [1] - container and volume; [2] - Remove [1] and images [1/2]: " ANS
ANS=${ANS:-0}

[[ "$ANS" -ge 1 ]] && {
  # Stop and remove the stack's containers together with their volumes.
  echo "$COMPOSE" | docker compose -f - down -v db triton eltexfnrs tensorrt

  echo "CONTAINERS AND VOLUMES DELETED."
  sleep 3

}

[[ "$ANS" -ge 2 ]] && {

  # Pull the image IDs (space-delimited hex columns) out of the
  # `docker compose images` table and remove each image.
  # FIX: the original class [0-f] also matched ':;<=>?@' and uppercase
  # letters; [0-9a-f] matches hexadecimal digits only.
  for i in $(echo "$img" | grep -o " [0-9a-f]\+ " | tr -d ' ')
  do
    docker rmi "$i"
  done

  echo "IMAGES DELETED."
  sleep 3

}

# (Re)create the whole stack detached and follow the application log.
echo "$COMPOSE" | docker compose -f - up -d

docker logs -n 10 -f user-eltexfnrs-1










