#include <utility>

#include <utility>

#include <utility>

#include <utility>

//
// Created by pulsarv on 19-12-6.
//

#ifndef MOBILESEARCH_TASK_MANAGER_H
#define MOBILESEARCH_TASK_MANAGER_H

#include <set>
#include <stack>
#include <list>
#include <mutex>
#include <string>
#include <thread>
#include <vector>
#include <atomic>
#include <memory>
#include <utility>
#include <iostream>
#include <algorithm>
#include <QtGui/QFont>
#include <boost/thread.hpp>
#include <condition_variable>
#include <vehicle_detector.h>
#include <road_segmentation.h>
#include <vehicle_attributes_classifier.h>

#include <task_container.hpp>
#include <input_wrappers.h>
#include <QtGui/QFontDatabase>
#include <QtWidgets/QApplication>
#include <QtQml/QQmlApplicationEngine>
#include <boost/interprocess/ipc/message_queue.hpp>
#include <grid_mat.h>
#include <net_wrappers.h>
#include "data_struct.h"

// Floating-point seconds: keeps sub-second precision in FPS/latency arithmetic.
// (`using` alias instead of `typedef`, per modern C++ idiom; same type, same name.)
using Sec = std::chrono::duration<float, std::chrono::seconds::period>;
namespace MobileSearch {


    // Guards shared access to the capture sources (definitions are in the .cpp).
    // NOTE(review): semantics inferred from the names — presumably a process-wide
    // lock over the video inputs; confirm against the implementation file.
    bool try_lock_sources();  // non-blocking; presumably returns true when acquired

    void unlock_sources();    // releases the sources lock

    void lock_sources();      // blocks until the sources lock is acquired

    class VideoFrame {  // VideoFrame can represent not a single image but the whole grid
    public:
        typedef std::shared_ptr<VideoFrame> Ptr;

        // Binds a captured image to its source channel and sequence number.
        // `frame` is taken by value and moved in, so callers may pass a temporary.
        VideoFrame(unsigned sourceID, int64_t frameId, cv::Mat frame = cv::Mat()) :
                sourceID{sourceID}, frameId{frameId}, frame{std::move(frame)} {}

        virtual ~VideoFrame() = default;  // A user has to define how it is reconstructed

        const unsigned sourceID;  // index of the input channel this frame came from
        const int64_t frameId;    // per-source sequence number (used for task ordering)
        cv::Mat frame;            // pixel data; several tasks may draw on it (see Task)
    };

    class Task {
    public:
        // `priority`: larger values are scheduled first (see HigherPriority below).
        explicit Task(VideoFrame::Ptr sharedVideoFrame, float priority = 0) :
                sharedVideoFrame{std::move(sharedVideoFrame)}, priority{priority} {}

        // True when the task can run without blocking; presumably checked by the
        // scheduler before process() — confirm in Worker::threadFunc (.cpp).
        virtual bool isReady() = 0;

        // Performs the actual work for this task.
        virtual void process() = 0;

        virtual ~Task() = default;

        VideoFrame::Ptr sharedVideoFrame;  // it is possible that two tasks try to draw on the same cvMat
        const float priority;  // scheduling weight compared by HigherPriority
    };

    // Strict weak ordering for the pending-task set: higher priority first,
    // then older (smaller) frame id, and finally the shared_ptr value itself as
    // a unique tie-breaker so distinct tasks never compare equivalent.
    struct HigherPriority {
        bool operator()(const std::shared_ptr<Task> &lhs, const std::shared_ptr<Task> &rhs) const {
            if (lhs->priority != rhs->priority) {
                return lhs->priority > rhs->priority;
            }
            const int64_t lhsFrame = lhs->sharedVideoFrame->frameId;
            const int64_t rhsFrame = rhs->sharedVideoFrame->frameId;
            if (lhsFrame != rhsFrame) {
                return lhsFrame < rhsFrame;
            }
            return lhs < rhs;  // pointer-identity tie-breaker, as in the original
        }
    };

    // Wraps a boost::thread that pumps frames from one camera into an
    // interprocess message queue.  All member definitions are out of line;
    // behaviour notes below are inferred from the declarations — confirm in the .cpp.
    class VideoThread {
    public:

        VideoThread(camera_t camera);  // NOTE(review): single-arg ctor; `explicit` would be safer but changes the interface

        // Presumably starts video_thread() on v_thread for this->camera.
        void run(boost::shared_ptr<boost::interprocess::message_queue> &p_mobilesearch_video_message_queue);

        void stop();  // stops (and presumably joins/deletes) the capture thread

        // Thread entry point: reads frames for camera_index and posts them to the queue.
        static void video_thread(int &camera_index,
                                 const boost::shared_ptr<boost::interprocess::message_queue> &p_mobilesearch_video_message_queue);

    private:
        camera_t camera;
        boost::thread *v_thread{};  // NOTE(review): raw owning pointer? lifetime must be handled in stop()/dtor — a unique_ptr would be safer

    };

    // Thread pool that executes Tasks in HigherPriority order.  Definitions are
    // out of line; per-member notes are inferred from the declarations.
    class Worker {
    public:
        explicit Worker(unsigned threadNum);  // threadNum: number of pool threads to create

        ~Worker();

        void runThreads();  // presumably launches the pool threads running threadFunc()

        void push(std::shared_ptr<Task> task);  // enqueues a task (and presumably wakes a waiting thread)

        void threadFunc();  // worker loop body shared by all pool threads

        void stop();  // asks the worker loops to finish

        void join();  // waits for all pool threads to exit

    private:
        std::condition_variable tasksCondVar;  // presumably signalled by push()/stop()
        std::set<std::shared_ptr<Task>, HigherPriority> tasks;  // pending tasks, best-priority first
        std::mutex tasksMutex;  // guards `tasks`
        std::vector<std::thread> threadPull;  // the pool threads ("Pull" looks like a typo for "Pool"; kept — out-of-line code uses it)
        std::atomic<bool> running;
        std::exception_ptr currentException;  // presumably the first exception thrown by a task
        std::mutex excpetionMutex;  // guards currentException (typo kept: renaming would break the .cpp definitions)
    };

    // One detection result: object class, bounding box and a textual
    // description (vehicle attributes or a decoded licence plate).
    struct BboxAndDescr {
        enum class ObjectType {
            NONE,
            VEHICCLE,  // NOTE(review): typo for VEHICLE; kept because other translation units reference this enumerator
            PLATE,
        } objectType;
        cv::Rect rect;      // bounding box in frame coordinates
        std::string descr;  // human-readable description of the detected object
    };

    // Owns a pool of InferRequests (`actualInferRequests`) and publishes them as
    // reference_wrappers through a ConcurrentContainer so workers can borrow and
    // return them concurrently.
    struct InferRequestsContainer {
        explicit InferRequestsContainer(std::vector<InferenceEngine::InferRequest> inferRequests) :
                actualInferRequests{std::move(inferRequests)} {
            // Seed the concurrent pool with references to the owned requests.
            // BUG FIX: the previous code called `inferRequests.push_back(ir)`, which
            // appended to the (shadowing) constructor PARAMETER — the member pool
            // stayed empty.  Qualify with `this->` to reach the member, matching
            // the copy-assignment operator below.
            for (auto &ir : actualInferRequests) {
                this->inferRequests.container.emplace_back(ir);
            }
        }

        InferRequestsContainer() = default;

        // NOTE(review): the implicitly generated copy CONSTRUCTOR would copy the
        // reference_wrappers pointing into the source's vector — only copy-ASSIGN
        // (as Context does) or default-construct this type.
        InferRequestsContainer &operator=(const InferRequestsContainer &other) {  // copy assignment
            if (this != &other) {  // self-assignment check expected
                this->actualInferRequests = other.actualInferRequests;
                this->inferRequests.container.clear();  // drop stale references before re-seeding
                for (auto &ir : this->actualInferRequests) {
                    this->inferRequests.container.emplace_back(ir);
                }
            }
            return *this;
        }

        std::vector<InferenceEngine::InferRequest> getActualInferRequests() {
            return actualInferRequests;
        }

        // References into actualInferRequests; synchronised by the container itself.
        ConcurrentContainer<std::vector<std::reference_wrapper<InferenceEngine::InferRequest>>> inferRequests;

    private:
        std::vector<InferenceEngine::InferRequest> actualInferRequests;
    };

    struct Context {  // stores all global data for tasks
        // Wires every pipeline stage together: input channels + their reader
        // worker, the detector + its infer worker, the classifiers/recogniser +
        // their worker, drawing state, and the three InferRequest pools.
        // All per-channel vectors are sized from inputChannels.size().
        Context(const std::vector<std::shared_ptr<InputChannel>> &inputChannels,
                const std::weak_ptr<Worker> &readersWorker,
                const Detector &detector, const std::weak_ptr<Worker> &inferTasksWorker,
                const VehicleAttributesClassifier &vehicleAttributesClassifier, const Lpr &lpr,
                const std::weak_ptr<Worker> &detectionsProcessorsWorker,
                int pause, const std::vector<cv::Size> &gridParam, const cv::Size &displayResolution,
                std::chrono::steady_clock::duration showPeriod,
                const std::weak_ptr<Worker> &drawersWorker,
                uint64_t lastFrameId,
                std::weak_ptr<Worker> resAggregatorsWorker,
                uint64_t nireq,
                bool isVideo,
                std::size_t nclassifiersireq, std::size_t nrecognizersireq) :
                readersContext{inputChannels, readersWorker, std::vector<int64_t>(inputChannels.size(), -1),
                               std::vector<std::mutex>(inputChannels.size())},
                inferTasksContext{detector, inferTasksWorker},
                detectionsProcessorsContext{vehicleAttributesClassifier, lpr, detectionsProcessorsWorker},
                drawersContext{pause, gridParam, displayResolution, showPeriod, drawersWorker},
                videoFramesContext{std::vector<uint64_t>(inputChannels.size(), lastFrameId),
                                   std::vector<std::mutex>(inputChannels.size())},
                resAggregatorsWorker{std::move(resAggregatorsWorker)},
                nireq{nireq},
                isVideo{isVideo},
                t0{std::chrono::steady_clock::time_point()},
                freeDetectionInfersCount{0},
                frameCounter{0} {
            // One grid cell per input channel is required by the drawer.
            assert(inputChannels.size() == gridParam.size());
            // Create the requested number of InferRequests per model up front,
            // then hand them to the pools (copy-assignment re-seeds the
            // reference wrappers — see InferRequestsContainer::operator=).
            std::vector<InferenceEngine::InferRequest> detectorInferRequests;
            std::vector<InferenceEngine::InferRequest> attributesInferRequests;
            std::vector<InferenceEngine::InferRequest> lprInferRequests;
            detectorInferRequests.reserve(nireq);
            attributesInferRequests.reserve(nclassifiersireq);
            lprInferRequests.reserve(nrecognizersireq);
            std::generate_n(std::back_inserter(detectorInferRequests), nireq, [&] {
                return inferTasksContext.detector.createInferRequest();
            });
            std::generate_n(std::back_inserter(attributesInferRequests), nclassifiersireq, [&] {
                return detectionsProcessorsContext.vehicleAttributesClassifier.createInferRequest();
            });
            std::generate_n(std::back_inserter(lprInferRequests), nrecognizersireq, [&] {
                return detectionsProcessorsContext.lpr.createInferRequest();
            });
            detectorsInfers = InferRequestsContainer(detectorInferRequests);
            attributesInfers = InferRequestsContainer(attributesInferRequests);
            platesInfers = InferRequestsContainer(lprInferRequests);
        }

        // State shared by Reader tasks: one lastCapturedFrameId + mutex per channel.
        struct {
            std::vector<std::shared_ptr<InputChannel>> inputChannels;
            std::weak_ptr<Worker> readersWorker;
            std::vector<int64_t> lastCapturedFrameIds;
            std::vector<std::mutex> lastCapturedFrameIdsMutexes;
        } readersContext;
        // State shared by InferTask: the detector model and its worker.
        struct {
            Detector detector;
            std::weak_ptr<Worker> inferTasksWorker;
        } inferTasksContext;
        // State shared by DetectionsProcessor: the two classifier models.
        struct {
            VehicleAttributesClassifier vehicleAttributesClassifier;
            Lpr lpr;
            std::weak_ptr<Worker> detectionsProcessorsWorker;
        } detectionsProcessorsContext;

        // State shared by Drawer tasks: display geometry, pacing and the grid mats.
        struct DrawersContext {
            DrawersContext(int pause, std::vector<cv::Size> gridParam, cv::Size displayResolution,
                           std::chrono::steady_clock::duration showPeriod,
                           std::weak_ptr<Worker> drawersWorker) :
                    pause{pause}, gridParam{std::move(gridParam)}, displayResolution{displayResolution},
                    showPeriod{showPeriod},
                    drawersWorker{std::move(drawersWorker)},
                    lastShownframeId{0}, prevShow{std::chrono::steady_clock::time_point()}, framesAfterUpdate{0},
                    updateTime{std::chrono::steady_clock::time_point()} {}

            int pause;
            std::vector<cv::Size> gridParam;
            cv::Size displayResolution;
            std::chrono::steady_clock::duration showPeriod;  // desired frequency of imshow
            std::weak_ptr<Worker> drawersWorker;
            int64_t lastShownframeId;
            std::chrono::steady_clock::time_point prevShow;  // time stamp of previous imshow
            std::map<int64_t, GridMat> gridMats;
            std::mutex drawerMutex;
            std::ostringstream outThroughput;
            unsigned framesAfterUpdate;
            std::chrono::steady_clock::time_point updateTime;
        } drawersContext;

        // Per-channel frame-id bookkeeping for (re)created VideoFrames.
        struct {
            std::vector<uint64_t> lastframeIds;
            std::vector<std::mutex> lastFrameIdsMutexes;
        } videoFramesContext;
        std::weak_ptr<Worker> resAggregatorsWorker;
        std::mutex classifiersAggreagatorPrintMutex;  // serialises raw-result printing (see ClassifiersAggreagator dtor)
        uint64_t nireq;  // number of detector InferRequests
        bool isVideo;
        std::chrono::steady_clock::time_point t0;
        std::atomic<std::vector<InferenceEngine::InferRequest>::size_type> freeDetectionInfersCount;
        std::atomic<uint64_t> frameCounter;
        InferRequestsContainer detectorsInfers, attributesInfers, platesInfers;
    };

    // A VideoFrame that is "reborn" on destruction: the out-of-line destructor
    // presumably re-enqueues a successor frame for the same slot — confirm in the .cpp.
    class ReborningVideoFrame : public VideoFrame {
    public:
        ReborningVideoFrame(Context &context, const unsigned sourceID, const int64_t frameId,
                            const cv::Mat &frame = cv::Mat()) :
                VideoFrame{sourceID, frameId, frame},
                context(context) {}  // can not write context{context} because of CentOS 7.4 compiler bug
        ~ReborningVideoFrame() override;

        Context &context;  // back-reference to the shared pipeline state; must outlive every frame
    };

    class Drawer : public Task {  // accumulates and shows processed frames
    public:
        // Priority 1.0 — lowest in the pipeline, so drawing runs after
        // detection (5.0), aggregation (4.0) and reading (2.0).
        explicit Drawer(VideoFrame::Ptr sharedVideoFrame) :
                Task{std::move(sharedVideoFrame), 1.0} {}

        bool isReady() override;

        void process() override;
    };

    class ResAggregator : public Task {  // draws results on the frame
    public:
        // Takes ownership of the accumulated detection results for one frame.
        // Priority 4.0: below detection (5.0), above reading/drawing.
        ResAggregator(const VideoFrame::Ptr &sharedVideoFrame, std::list<BboxAndDescr> &&boxesAndDescrs) :
                Task{sharedVideoFrame, 4.0}, boxesAndDescrs{std::move(boxesAndDescrs)} {}

        bool isReady() override {  // always runnable: all inputs arrived via the constructor
            return true;
        }

        void process() override;

    private:
        std::list<BboxAndDescr> boxesAndDescrs;  // results to render onto the frame
    };

    // Pushes `task` onto `worker` if it is still alive (weak_ptr must lock);
    // exact failure behaviour is defined out of line — confirm in the .cpp.
    void tryPush(const std::weak_ptr<Worker> &worker, std::shared_ptr<Task> &&task);

    class ClassifiersAggreagator {
        // waits for all classifiers and recognisers accumulating results
    public:
        std::string rawDetections;
        ConcurrentContainer<std::list<std::string>> rawAttributes;
        ConcurrentContainer<std::list<std::string>> rawDecodedPlates;

        explicit ClassifiersAggreagator(VideoFrame::Ptr sharedVideoFrame) :
                sharedVideoFrame{std::move(sharedVideoFrame)} {}

        ~ClassifiersAggreagator() {
            std::mutex &printMutex = dynamic_cast<ReborningVideoFrame *>(sharedVideoFrame.get())->context.classifiersAggreagatorPrintMutex;
            printMutex.lock();
            std::cout << rawDetections;
            for (const std::string &rawAttribute : rawAttributes.container) {  // destructor assures that none uses the container
                std::cout << rawAttribute;
            }
            for (const std::string &rawDecodedPlate : rawDecodedPlates.container) {
                std::cout << rawDecodedPlate;
            }
            printMutex.unlock();
            tryPush(dynamic_cast<ReborningVideoFrame *>(sharedVideoFrame.get())->context.resAggregatorsWorker,
                    std::make_shared<ResAggregator>(sharedVideoFrame, std::move(boxesAndDescrs)));
        }

        void push(BboxAndDescr &&bboxAndDescr) {
            boxesAndDescrs.lockedPush_back(bboxAndDescr);
        }

        const VideoFrame::Ptr sharedVideoFrame;

    private:
        ConcurrentContainer<std::list<BboxAndDescr>> boxesAndDescrs;
    };

    class DetectionsProcessor
            : public Task {
        // extracts detections from blob InferRequests and runs classifiers and recognisers
    public:
        // First phase: parse raw detector output from `inferRequest`.
        DetectionsProcessor(VideoFrame::Ptr sharedVideoFrame, InferenceEngine::InferRequest *inferRequest) :
                Task{std::move(sharedVideoFrame), 1.0}, inferRequest{inferRequest},
                requireGettingNumberOfDetections{true} {}

        // Second phase: classify / recognise already-extracted vehicle and plate boxes.
        DetectionsProcessor(VideoFrame::Ptr sharedVideoFrame,
                            std::shared_ptr<ClassifiersAggreagator> &&classifiersAggreagator,
                            std::list<cv::Rect> &&vehicleRects,
                            std::list<cv::Rect> &&plateRects) :
                Task{std::move(sharedVideoFrame), 1.0}, classifiersAggreagator{std::move(classifiersAggreagator)},
                inferRequest{nullptr},
                vehicleRects{std::move(vehicleRects)}, plateRects{std::move(plateRects)},
                requireGettingNumberOfDetections{false} {}

        bool isReady() override;

        void process() override;

    private:
        std::string m_va;   // NOTE(review): presumably the vehicle-attributes model id — confirm in .cpp
        std::string m_lpr;  // NOTE(review): presumably the plate-recognition model id — confirm in .cpp
        bool FLAGS_r = false;  // raw-output printing flag (mirrors a demo-style -r option)
        std::shared_ptr<ClassifiersAggreagator> classifiersAggreagator;  // when no one stores this object we will draw
        InferenceEngine::InferRequest *inferRequest;  // non-owning; nullptr in the second-phase constructor
        std::list<cv::Rect> vehicleRects;  // vehicle boxes still to classify
        std::list<cv::Rect> plateRects;    // plate boxes still to recognise
        std::vector<std::reference_wrapper<InferenceEngine::InferRequest>> reservedAttributesRequests;
        std::vector<std::reference_wrapper<InferenceEngine::InferRequest>> reservedLprRequests;
        bool requireGettingNumberOfDetections;  // true only for the first-phase constructor
    };

    class InferTask : public Task {  // runs detection
    public:
        // Priority 5.0 — highest in the pipeline: detection is scheduled first.
        explicit InferTask(VideoFrame::Ptr sharedVideoFrame) :
                Task{std::move(sharedVideoFrame), 5.0} {}

        bool isReady() override;

        void process() override;
    };

    // Captures the next frame for its channel (implementation out of line).
    class Reader : public Task {
    public:
        // Priority 2.0 — above drawing (1.0), below aggregation/detection.
        explicit Reader(VideoFrame::Ptr sharedVideoFrame) :
                Task{std::move(sharedVideoFrame), 2.0} {}

        bool isReady() override;

        void process() override;
    };


    // Top-level owner: presumably spins up capture threads for the given cameras
    // and the workers driving the task pipeline.  Definitions are out of line —
    // confirm the notes below against the .cpp.
    class TaskManager {
    public:
        TaskManager(std::vector<camera_t> cameras);  // NOTE(review): single-arg ctor; `explicit` would be safer but changes the interface

        ~TaskManager();

        void clear_message_queue();  // presumably drops stale entries from the IPC message queue

        void run_worker();  // starts capture/processing

        void stop_worker();  // stops capture/processing

    private:
        std::vector<camera_t> cameras;  // configured capture sources
        VideoThread *videoThread{};  // NOTE(review): raw owning pointer? lifetime must be handled in the dtor — a unique_ptr would be safer
    };
}


#endif //MOBILESEARCH_TASK_MANAGER_H
