# Makefile for Nuclio function deployment and execution
# `-include` (rather than `include`) so a missing .env does not abort
# parsing; variables then fall back to the `?=` defaults below.
# `export` publishes every Make variable to recipe sub-shells.
-include .env
export

# Default goal
.DEFAULT_GOAL := all

# Recipes rely on bash features (`echo -e`, `&>` redirection, `$${var%%:*}`).
SHELL := /bin/bash

# Default values for classification (override on the command line,
# e.g. `make classify CONCEPT_ID=my_concept`)
CONCEPT_ID ?= default_concept
IMAGE_ID ?= default_image

# Number of instances for each service.
# Consumed by dep_skel / dep_contour / dep_classification, which deploy
# N copies of each Nuclio function.
INSTANCES_SKEL ?= 3
INSTANCES_CONTOUR ?= 2
INSTANCES_CLASSIFICATION ?= 3

# Kafka partition counts per topic.
# NOTE(review): presumably sized to match the INSTANCES_* values above so
# every consumer instance can own at least one partition — confirm.
PARTITIONS_CONNECTOR ?= 8
PARTITIONS_SKEL ?= 8
PARTITIONS_CONTOUR ?= 4
PARTITIONS_CLASSIFICATION ?= 1
PARTITIONS_DLQ ?= 1

# Colors for output (ANSI escape sequences, rendered by `echo -e`)
BLUE := \033[0;34m
GREEN := \033[0;32m
RED := \033[0;31m
# No Color / reset. The comment lives on its own line on purpose: with an
# inline `VALUE # comment`, make keeps the whitespace before `#` as part
# of the value, so NC would have carried a trailing space.
NC := \033[0m

# Host IP that containers and nuctl use to reach services on the host.
# `ipconfig getifaddr en0` is macOS-only; fall back to `hostname -I` so
# the address also resolves on Linux instead of silently staying empty.
HOST_IP := $(shell ipconfig getifaddr en0 2>/dev/null || hostname -I 2>/dev/null | awk '{print $$1}')
KAFKA_BROKERS := $(HOST_IP):29092
# NOTE(review): hardcoded dev credential; consider moving it into .env.
NEO4J_PASS := 111122223333

# Host directory mounted into the Nuclio functions, and its in-container
# mount point; model directory mounted into the classification function.
LOCAL_STORAGE := ./tests/
NUCLIO_STORAGE := /opt/nuclio/shared_storage/
LOCAL_MODEL_PATH := ./src/training/latest_model

# Kafka topic names, one per pipeline stage (simple `:=` — plain literals).
DLQ_TOPIC := dlq-topic
CONNECTOR_KAFKA_TOPIC := connector-output-topic
SKELETONIZATION_KAFKA_TOPIC := skeletonization-output-topic
CONTOUR_ANALYSIS_KAFKA_TOPIC := contour-analysis-output-topic
CLASSIFICATION_KAFKA_TOPIC := classification-output-topic

# Full topic set, used by the recreate/list targets.
TOPICS := $(CONNECTOR_KAFKA_TOPIC) $(DLQ_TOPIC) $(SKELETONIZATION_KAFKA_TOPIC) $(CONTOUR_ANALYSIS_KAFKA_TOPIC) $(CLASSIFICATION_KAFKA_TOPIC)

# Parameters for send_to_connector (override per invocation,
# e.g. `make send_to_connector OPERATION=classify CONCEPT_NAME=foo`)
OPERATION ?= train
CONCEPT_NAME ?= default_concept
SESSION_ID ?= default_session
SUBCLASS ?= default_subclass

# Energy minimization option (default: true).
# Forwarded to the concept-creator function as the
# USE_ENERGY_MINIMIZATION environment variable (see dep_concept).
USE_ENERGY_MINIMIZATION ?= true

# Phony targets — none of these produce files, so declare them all phony;
# otherwise a stray file with the same name would silently turn the
# target into a no-op. (dep_*/undep_* targets are declared further down.)
.PHONY: all deploy train classify send_random_image clean help start_services \
        create_kafka_topics recreate_kafka_topics list_kafka_topics \
        send_to_connector create_neo4j_indexes list_neo4j_indexes \
        lib clean_results train_square train_triangle \
        deploy_energy_minimization train_energy_minimization

# Virtualenv tooling used by the `lib` target.
VENV := natural-agi/bin
PYTHON := $(VENV)/python
PIP := $(VENV)/pip3

# Build the shared "natural-agi-common" package, upload it with twine,
# then install the freshly published version into the local venv.
# The dist/build dirs are wiped before and after to avoid stale artifacts.
lib:
	@echo -e "${BLUE}Building common library...${NC}"
	@rm -rf dist build *.egg-info
	@$(PYTHON) -m build
	@$(PIP) install twine
	@$(VENV)/twine upload dist/* --verbose
	@rm -rf dist build *.egg-info
	@echo -e "${GREEN}Library built and uploaded.${NC}"
	@$(PIP) install --upgrade natural-agi-common
	@echo -e "${GREEN}Library installed.${NC}"

# Bring up the docker compose stack (HOST_IP exported for compose
# interpolation), then make sure the Neo4j indexes exist.
start_services:
	@echo -e "${BLUE}Starting Docker services...${NC}"
	@HOST_IP=${HOST_IP} docker compose up -d
	@echo -e "${GREEN}Docker services started.${NC}"
	@$(MAKE) create_neo4j_indexes  # $(MAKE), not `make`: propagates flags/jobserver

# Wait until the broker answers, then create every pipeline topic with
# its configured partition count. Idempotent via --if-not-exists.
# Topics are described as "name:partitions" pairs and created in one
# loop instead of five copy-pasted commands; the broker address comes
# from $(KAFKA_BROKERS) rather than a re-spelled ${HOST_IP}:29092.
create_kafka_topics:
	@echo -e "${BLUE}Waiting for Kafka to be ready...${NC}"
	@until docker compose exec -T kafka kafka-topics --list --bootstrap-server $(KAFKA_BROKERS) &> /dev/null; do \
		echo "Waiting for Kafka to be ready..."; \
		sleep 5; \
	done
	@echo -e "${BLUE}Creating Kafka topics...${NC}"
	@for spec in \
		"$(CONNECTOR_KAFKA_TOPIC):$(PARTITIONS_CONNECTOR)" \
		"$(SKELETONIZATION_KAFKA_TOPIC):$(PARTITIONS_SKEL)" \
		"$(CONTOUR_ANALYSIS_KAFKA_TOPIC):$(PARTITIONS_CONTOUR)" \
		"$(CLASSIFICATION_KAFKA_TOPIC):$(PARTITIONS_CLASSIFICATION)" \
		"$(DLQ_TOPIC):$(PARTITIONS_DLQ)"; do \
		topic=$${spec%%:*}; partitions=$${spec##*:}; \
		docker compose exec -T kafka kafka-topics --create --bootstrap-server $(KAFKA_BROKERS) \
			--if-not-exists --topic "$$topic" --partitions "$$partitions" --replication-factor 1 \
			|| echo "Failed to create topic: $$topic"; \
	done
	@echo -e "${GREEN}Kafka topics creation attempt completed.${NC}"

# Delete all pipeline topics, wait until the broker reports them gone,
# drop the consumer groups (so offsets reset), then recreate the topics.
recreate_kafka_topics:
	@echo -e "${BLUE}Deleting all Kafka topics...${NC}"
	@for topic in $(TOPICS); do \
		echo "Deleting topic: $$topic"; \
		docker compose exec -T kafka kafka-topics --delete --bootstrap-server $(KAFKA_BROKERS) --topic "$$topic" 2>/dev/null || echo "  Topic $$topic does not exist"; \
	done
	@echo -e "${BLUE}Waiting for topics to be fully deleted...${NC}"
	@for topic in $(TOPICS); do \
		while docker compose exec -T kafka kafka-topics --list --bootstrap-server $(KAFKA_BROKERS) 2>/dev/null | grep -qx "$$topic"; do \
			sleep 1; \
		done; \
	done
	@echo -e "${BLUE}Resetting consumer group offsets...${NC}"
	@for group in skeletonization-group contour-analysis-group classification-group; do \
		echo "  Deleting group $$group..."; \
		docker compose exec -T kafka kafka-consumer-groups --bootstrap-server $(KAFKA_BROKERS) --group "$$group" --delete 2>/dev/null || true; \
	done
	@$(MAKE) create_kafka_topics  # $(MAKE), not `make`: propagates flags/jobserver

# List the topics currently present in the broker, then the expected set.
# `-T` added for consistency with every other non-interactive exec here.
list_kafka_topics:
	@echo -e "${BLUE}Listing Kafka topics...${NC}"
	@docker compose exec -T kafka kafka-topics --list --bootstrap-server $(KAFKA_BROKERS) || echo -e "${RED}Failed to list Kafka topics${NC}"
	@echo -e "${BLUE}Expected topics:${NC}"
	@for topic in $(TOPICS); do echo "  $$topic"; done

# Block until Neo4j answers a trivial query over Bolt, then apply the
# index script. Safe to re-run (see help: described as idempotent).
create_neo4j_indexes:
	@echo -e "${BLUE}Waiting for Neo4j to be ready...${NC}"
	@until docker compose exec -T server1 cypher-shell -u neo4j -p ${NEO4J_PASS} "RETURN 1" &> /dev/null; do \
		echo "Waiting for Neo4j Bolt to be ready..."; \
		sleep 3; \
	done
	@echo -e "${BLUE}Creating Neo4j property indexes...${NC}"
	@docker compose exec -T server1 cypher-shell -u neo4j -p ${NEO4J_PASS} < scripts/neo4j_indexes.cypher
	@echo -e "${GREEN}Neo4j indexes created.${NC}"

# Show all Neo4j indexes with their type, target labels/properties and
# state; prints a red error instead of failing the make run.
list_neo4j_indexes:
	@echo -e "${BLUE}Listing Neo4j indexes...${NC}"
	@docker compose exec -T server1 cypher-shell -u neo4j -p ${NEO4J_PASS} \
		"SHOW INDEXES YIELD name, type, labelsOrTypes, properties, state" \
		|| echo -e "${RED}Failed to list Neo4j indexes${NC}"

# Main targets
# NOTE(review): `train` has no explicit rule in this file — it matches
# the catch-all `%` rule below and does nothing. Confirm this is intended.
all: start_services create_kafka_topics deploy train

# Special target to allow passing arguments to other targets
# (e.g. the trailing subclass goal in `make train_prepared_samples_X Y`
# matches here and becomes a no-op). Side effect: misspelled target
# names also "succeed" silently instead of erroring.
%:
	@:

# Usage: make classify IMAGE_PATH=/path/to/image.jpg [PARAMS='{"param1": "value1", "param2": "value2"}']
# Example: make classify IMAGE_PATH=/path/to/image.jpg PARAMS='{"feature_weight": 0.7, "structural_weight": 0.3}'
# NOTE(review): IMAGE_PATH is only echoed — it never appears in the
# request body; only PARAMS is sent. Confirm the connector obtains the
# image another way, or the payload is missing a field.
# PARAMS must expand to a valid JSON object or the body is malformed.
classify:
	@echo -e "${BLUE}Classifying image: $(IMAGE_PATH)${NC}"
	@curl -X POST http://localhost:5002 \
		-H "Content-Type: application/json" \
		-d "{\"operation\": \"classify\", \"parameters\": $(PARAMS)}" || \
		(echo -e "${RED}Classification failed.${NC}" && exit 1)
	@echo -e "${GREEN}Classification request sent to connector.${NC}"

# Remove all training result artifacts.
# NOTE(review): identical to clean_results below — consider consolidating.
clean:
	@echo -e "${BLUE}Cleaning up...${NC}"
	@rm -rf ./training_results/*
	@echo -e "${GREEN}Cleanup completed.${NC}"

# Self-documentation. Text corrected to match the actual targets:
# recreate_kafka_topics added; `classify` description now reflects the
# IMAGE_PATH/PARAMS variables the target actually reads (it does not use
# concept_id/image_id); `all` description matches its prerequisite list.
help:
	@echo "Available targets:"
	@echo "  all                - Start services, create Kafka topics, deploy, and train (default)"
	@echo "  deploy             - Deploy Nuclio functions"
	@echo "  create_kafka_topics - Create Kafka topics"
	@echo "  recreate_kafka_topics - Delete and recreate Kafka topics (resets consumer offsets)"
	@echo "  list_kafka_topics  - List existing Kafka topics"
	@echo "  create_neo4j_indexes - Create property indexes in Neo4j (idempotent)"
	@echo "  list_neo4j_indexes   - List existing Neo4j indexes"
	@echo "  train              - Run training script"
	@echo "  send_random_image  - Send a random image to the line detector"
	@echo "  classify           - Send a classify request (IMAGE_PATH=..., PARAMS='{...}')"
	@echo "  clean              - Clean up training results"
	@echo "  help               - Show this help message"
	@echo "  send_to_connector  - Send data to connector (OPERATION=train|classify, CONCEPT_NAME=name)"
	@echo ""
	@echo "Configuration options:"
	@echo "  INSTANCES_SKEL      - Number of skeletonization instances (default: 3)"
	@echo "  INSTANCES_CONTOUR   - Number of contour analysis instances (default: 2)"
	@echo "  INSTANCES_CLASSIFICATION - Number of classification instances (default: 3)"
	@echo "  USE_ENERGY_MINIMIZATION - Use energy minimization for concept formation (default: true)"
	@echo ""
	@echo "Example: make deploy INSTANCES_SKEL=4 INSTANCES_CONTOUR=2 INSTANCES_CLASSIFICATION=3"

# Train on prepared samples: `make train_prepared_samples_<class> <subclass>`.
# The extra goal (<subclass>) is pulled out of MAKECMDGOALS here and
# absorbed as a no-op by the catch-all `%` rule above.
train_prepared_samples_%:
	$(eval subclass := $(filter-out $@,$(MAKECMDGOALS)))
	@echo -e "${BLUE}Running training script for prepared samples class $* subclass $(subclass)...${NC}"
	@$(MAKE) send_to_connector OPERATION=train CONCEPT_NAME=mnist_$* SUBCLASS=$(subclass) NUCLIO_STORAGE=$(NUCLIO_STORAGE)/prepared_samples/$*_$(subclass) SESSION_ID=$*_$(subclass)
	@echo -e "${GREEN}Training script completed.${NC}"

# Train on the square dataset under $(NUCLIO_STORAGE)/square.
# Uses $(MAKE) (not literal `make`) so flags and the jobserver propagate.
train_square:
	@echo -e "${BLUE}Running training script...${NC}"
	@$(MAKE) send_to_connector OPERATION=train CONCEPT_NAME=$(CONCEPT_NAME) NUCLIO_STORAGE=$(NUCLIO_STORAGE)/square
	@echo -e "${GREEN}Training script completed.${NC}"

# Train on the triangle dataset under $(NUCLIO_STORAGE)/triangle.
# Uses $(MAKE) (not literal `make`) so flags and the jobserver propagate.
train_triangle:
	@echo -e "${BLUE}Running training script...${NC}"
	@$(MAKE) send_to_connector OPERATION=train CONCEPT_NAME=$(CONCEPT_NAME) NUCLIO_STORAGE=$(NUCLIO_STORAGE)/triangle
	@echo -e "${GREEN}Training script completed.${NC}"

# Train on an MNIST digit class: `make train_mnist_<digit>` reads from
# $(NUCLIO_STORAGE)/mnist_<digit>/train and tags the session with <digit>.
train_mnist_%:
	@echo -e "${BLUE}Running training script...${NC}"
	@$(MAKE) send_to_connector OPERATION=train CONCEPT_NAME=mnist_$* NUCLIO_STORAGE=$(NUCLIO_STORAGE)/mnist_$*/train SESSION_ID=$*
	@echo -e "${GREEN}Training script completed.${NC}"


# POST an operation request to the connector function on localhost:5002.
# OPERATION / NUCLIO_STORAGE / CONCEPT_NAME / SESSION_ID / SUBCLASS are
# interpolated directly into the JSON body — values containing quotes or
# backslashes would break the payload, so keep them plain strings.
send_to_connector:
	@echo -e "${BLUE}Sending data to connector...${NC}"
	@curl -X POST http://localhost:5002 \
		-H "Content-Type: application/json" \
		-d '{"operation": "$(OPERATION)", "parameters": {"dataset_path": "$(NUCLIO_STORAGE)", "concept_name": "$(CONCEPT_NAME)", "session_id": "$(SESSION_ID)", "subclass": "$(SUBCLASS)"}}' || \
		(echo -e "${RED}Failed to send data to connector.${NC}" && exit 1)
	@echo -e "\n${GREEN}Data sent to connector successfully.${NC}"

# Function deployment targets
.PHONY: dep_conn dep_skel dep_contour dep_concept dep_classification dep_all
.PHONY: undep_skel undep_contour undep_classification undep_all

# Common env/trigger fragments shared between the first (built) instance
# and the --run-image replicas of each function, so all instances are
# configured identically.

# Skeletonization: env for the function itself...
SKEL_ENV = -e KAFKA_BOOTSTRAP_SERVERS="${KAFKA_BROKERS}" \
	-e DLQ_TOPIC="${DLQ_TOPIC}" \
	-e SIMPLIFICATION_EPSILON=5 \
	-e SKELETONIZATION_THRESHOLD=160 \
	-e KAFKA_TOPIC="${SKELETONIZATION_KAFKA_TOPIC}"

# ...and its Kafka trigger: consumes the connector's output topic; all
# instances share the "skeletonization-group" consumer group.
SKEL_TRIGGERS = --triggers '{"kafka-trigger": {"kind": "kafka-cluster", "maxWorkers": 1, "attributes": {"initialOffset": "earliest", "topics": ["${CONNECTOR_KAFKA_TOPIC}"], "brokers": ["${KAFKA_BROKERS}"], "consumerGroup": "skeletonization-group"}}}'

# Contour analysis: needs Neo4j access in addition to Kafka.
CONTOUR_ENV = -e NEO4J_DSN=bolt://${HOST_IP}:7687 \
	-e NEO4J_USER=neo4j \
	-e NEO4J_PASS=${NEO4J_PASS} \
	-e KAFKA_BOOTSTRAP_SERVERS="${KAFKA_BROKERS}" \
	-e DLQ_TOPIC="${DLQ_TOPIC}" \
	-e KAFKA_TOPIC="${CONTOUR_ANALYSIS_KAFKA_TOPIC}"

# Consumes skeletonization output; shared "contour-analysis-group".
CONTOUR_TRIGGERS = --triggers '{"kafka-trigger": {"kind": "kafka-cluster", "maxWorkers": 1, "attributes": {"initialOffset": "earliest", "topics": ["${SKELETONIZATION_KAFKA_TOPIC}"], "brokers": ["${KAFKA_BROKERS}"], "consumerGroup": "contour-analysis-group"}}}'

# Classification: Kafka + Neo4j, plus a timeout for graph edit distance.
CLASS_ENV = -e KAFKA_BOOTSTRAP_SERVERS="${KAFKA_BROKERS}" \
	-e DLQ_TOPIC="${DLQ_TOPIC}" \
	-e KAFKA_TOPIC="${CLASSIFICATION_KAFKA_TOPIC}" \
	-e NEO4J_DSN=bolt://${HOST_IP}:7687 \
	-e NEO4J_USER=neo4j \
	-e NEO4J_PASS=${NEO4J_PASS} \
	-e GED_TIMEOUT=15

# Consumes contour-analysis output; shared "classification-group".
CLASS_TRIGGERS = --triggers '{"kafka-trigger": {"kind": "kafka-cluster", "maxWorkers": 1, "attributes": {"initialOffset": "earliest", "topics": ["${CONTOUR_ANALYSIS_KAFKA_TOPIC}"], "brokers": ["${KAFKA_BROKERS}"], "consumerGroup": "classification-group"}}}'

# Images built by instance 1 of each function and reused by replicas.
SKEL_IMAGE = nuclio/processor-skeletonization:latest
CONTOUR_IMAGE = nuclio/processor-contour-analysis:latest
CLASS_IMAGE = nuclio/processor-classification:latest

# Deploy the connector function (single instance) with the shared storage
# volume mounted and its Kafka output topic configured.
dep_conn:
	@echo -e "${BLUE}Deploying connector...${NC}"
	@nuctl deploy --path src/connector \
		--platform local \
		--volume "${LOCAL_STORAGE}:${NUCLIO_STORAGE}" \
		-e KAFKA_BOOTSTRAP_SERVERS="${KAFKA_BROKERS}" \
		-e DLQ_TOPIC="${DLQ_TOPIC}" \
		-e KAFKA_TOPIC="${CONNECTOR_KAFKA_TOPIC}"
	@echo -e "${GREEN}Connector deployed.${NC}"

# Deploy $(INSTANCES_SKEL) copies of the skeletonization function.
# Instance 1 builds the image from source; instances 2..N reuse it via
# --run-image to skip rebuilds. All share SKEL_ENV/SKEL_TRIGGERS (same
# consumer group), so they split the input topic between them.
dep_skel:
	@echo -e "${BLUE}Deploying skeletonization ($(INSTANCES_SKEL) instances)...${NC}"
	@echo -e "${BLUE}  Instance 1 (building image)...${NC}"
	@nuctl deploy skeletonization --path src/skeletonization \
		--platform local \
		--volume "${LOCAL_STORAGE}:${NUCLIO_STORAGE}" \
		$(SKEL_ENV) $(SKEL_TRIGGERS)
	@for i in $$(seq 2 $(INSTANCES_SKEL)); do \
		echo -e "${BLUE}  Instance $$i (reusing image)...${NC}"; \
		nuctl deploy skeletonization-$$i \
			--run-image $(SKEL_IMAGE) \
			--runtime python:3.9 \
			--handler nuclio_handler:handler \
			--platform local \
			--volume "${LOCAL_STORAGE}:${NUCLIO_STORAGE}" \
			$(SKEL_ENV) $(SKEL_TRIGGERS); \
	done
	@echo -e "${GREEN}Skeletonization deployed ($(INSTANCES_SKEL) instances).${NC}"

# Deploy $(INSTANCES_CONTOUR) copies of contour analysis; same
# build-once/replicate pattern as dep_skel but with no storage volume.
dep_contour:
	@echo -e "${BLUE}Deploying contour analysis ($(INSTANCES_CONTOUR) instances)...${NC}"
	@echo -e "${BLUE}  Instance 1 (building image)...${NC}"
	@nuctl deploy contour-analysis --path src/contour_analysis \
		--platform local \
		$(CONTOUR_ENV) $(CONTOUR_TRIGGERS)
	@for i in $$(seq 2 $(INSTANCES_CONTOUR)); do \
		echo -e "${BLUE}  Instance $$i (reusing image)...${NC}"; \
		nuctl deploy contour-analysis-$$i \
			--run-image $(CONTOUR_IMAGE) \
			--runtime python:3.9 \
			--handler nuclio_handler:handler \
			--platform local \
			$(CONTOUR_ENV) $(CONTOUR_TRIGGERS); \
	done
	@echo -e "${GREEN}Contour analysis deployed ($(INSTANCES_CONTOUR) instances).${NC}"

# Deploy the concept creator (single instance). Talks to Neo4j only —
# no Kafka trigger — and receives the USE_ENERGY_MINIMIZATION toggle.
dep_concept:
	@echo -e "${BLUE}Deploying concept creator...${NC}"
	@nuctl deploy --path src/concept_creator \
		--platform local \
		-e NEO4J_DSN=bolt://${HOST_IP}:7687 \
		-e NEO4J_USER=neo4j \
		-e NEO4J_PASS=${NEO4J_PASS} \
		-e USE_ENERGY_MINIMIZATION=${USE_ENERGY_MINIMIZATION}
	@echo -e "${GREEN}Concept creator deployed.${NC}"

# Deploy $(INSTANCES_CLASSIFICATION) copies of the classification
# function; same build-once/replicate pattern as dep_skel, with the
# trained-model directory mounted at the shared storage path.
dep_classification:
	@echo -e "${BLUE}Deploying classification ($(INSTANCES_CLASSIFICATION) instances)...${NC}"
	@echo -e "${BLUE}  Instance 1 (building image)...${NC}"
	@nuctl deploy classification --path src/classification \
		--platform local \
		--volume "${LOCAL_MODEL_PATH}:${NUCLIO_STORAGE}" \
		$(CLASS_ENV) $(CLASS_TRIGGERS)
	@for i in $$(seq 2 $(INSTANCES_CLASSIFICATION)); do \
		echo -e "${BLUE}  Instance $$i (reusing image)...${NC}"; \
		nuctl deploy classification-$$i \
			--run-image $(CLASS_IMAGE) \
			--runtime python:3.9 \
			--handler nuclio_handler:handler \
			--platform local \
			--volume "${LOCAL_MODEL_PATH}:${NUCLIO_STORAGE}" \
			$(CLASS_ENV) $(CLASS_TRIGGERS); \
	done
	@echo -e "${GREEN}Classification deployed ($(INSTANCES_CLASSIFICATION) instances).${NC}"

# Cleanup targets for multi-instance functions.
# The sweep goes up to suffix 10 regardless of INSTANCES_SKEL so that
# leftovers from earlier, larger deployments are removed too; deleting a
# non-existent function is tolerated via `|| true`.
undep_skel:
	@echo -e "${BLUE}Removing skeletonization instances...${NC}"
	@nuctl delete function skeletonization --platform local 2>/dev/null || true
	@for i in $$(seq 2 10); do \
		nuctl delete function skeletonization-$$i --platform local 2>/dev/null || true; \
	done
	@echo -e "${GREEN}Skeletonization instances removed.${NC}"

# Remove all contour-analysis instances (sweeps suffixes 2..10; missing
# functions are tolerated).
undep_contour:
	@echo -e "${BLUE}Removing contour analysis instances...${NC}"
	@nuctl delete function contour-analysis --platform local 2>/dev/null || true
	@for i in $$(seq 2 10); do \
		nuctl delete function contour-analysis-$$i --platform local 2>/dev/null || true; \
	done
	@echo -e "${GREEN}Contour analysis instances removed.${NC}"

# Remove all classification instances (sweeps suffixes 2..10; missing
# functions are tolerated).
undep_classification:
	@echo -e "${BLUE}Removing classification instances...${NC}"
	@nuctl delete function classification --platform local 2>/dev/null || true
	@for i in $$(seq 2 10); do \
		nuctl delete function classification-$$i --platform local 2>/dev/null || true; \
	done
	@echo -e "${GREEN}Classification instances removed.${NC}"

# Remove every deployed function: the multi-instance services (via the
# undep_* prerequisites) plus the single-instance connector and
# concept-creator.
undep_all: undep_skel undep_contour undep_classification
	@nuctl delete function connector --platform local 2>/dev/null || true
	@nuctl delete function concept-creator --platform local 2>/dev/null || true
	@echo -e "${GREEN}All functions removed.${NC}"

# Deploy the full pipeline, then prune dangling images left behind by
# the nuclio builds.
dep_all: dep_conn dep_skel dep_contour dep_concept dep_classification
	@echo -e "${BLUE}Pruning dangling Docker images...${NC}"
	@docker image prune -f
	@echo -e "${GREEN}All functions deployed.${NC}"

# Full deploy: wipe previous results, then deploy every function.
deploy: clean_results dep_all

# Remove all training result artifacts.
# NOTE(review): same recipe as `clean` above — consider consolidating.
clean_results:
	@echo -e "${BLUE}Cleaning previous results...${NC}"
	@rm -rf ./training_results/*
	@echo -e "${GREEN}Results cleaned.${NC}"

# Deploy with energy-minimization concept formation forced on.
# Uses $(MAKE) (not literal `make`) so flags and the jobserver propagate.
deploy_energy_minimization:
	@echo -e "${BLUE}Deploying with energy minimization concept formation...${NC}"
	@$(MAKE) deploy USE_ENERGY_MINIMIZATION=true
	@echo -e "${GREEN}Deployed with energy minimization concept formation.${NC}"

# Train with energy-minimization concept formation forced on.
# Uses $(MAKE) (not literal `make`) so flags and the jobserver propagate.
# NOTE(review): `train` has no explicit rule in this file — it currently
# hits the no-op catch-all `%` rule; confirm the intended target.
train_energy_minimization:
	@echo -e "${BLUE}Training with energy minimization concept formation...${NC}"
	@$(MAKE) train USE_ENERGY_MINIMIZATION=true
	@echo -e "${GREEN}Training with energy minimization concept formation completed.${NC}"