#!/usr/bin/env php
<?php

/*

Wikimedia Commons Feed Updater - Desktopd Picture Server
Copyright (C) 2015  Desktopd Developer(s)

This program is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as
published by the Free Software Foundation, either version 3 of the
License, or (at your option) any later version.

This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU Affero General Public License for more details.

You should have received a copy of the GNU Affero General Public License
along with this program.  If not, see <http://www.gnu.org/licenses/>.

https://www.gnu.org/licenses/agpl.html

@kind entry-executable
@comment Unless specified, @kind defaults to 'class'

*/


namespace Desktopd;

use Desktopd\PlatformTools\Log;
use Desktopd\PlatformTools\CallbackLogger;
use Desktopd\PlatformTools\Random;
use Desktopd\PlatformTools\Timer;


/*
    Autoloader
*/
/*
    Autoloader: maps Vendor\Sub\Name to ../classes/Vendor/Sub/Name.php.
    Class names that are not plain identifier paths are rejected so the
    require below can never be steered to an arbitrary file.
*/
spl_autoload_register(function ($class) {
    // Already defined (or currently being defined) -- nothing to load.
    if (class_exists($class, false)) {
        return;
    }
    
    // Namespace separators become directory separators.
    $relative = strtr((string) $class, '\\', '/');
    
    // Whitelist check: each path segment must be a bare identifier
    // (no dots, no leading digits, no empty segments).
    $pattern = '~^[_a-zA-Z][_a-zA-Z0-9]*(?:/[_a-zA-Z][_a-zA-Z0-9]*)*$~';
    if (preg_match($pattern, $relative) !== 1) {
        return;
    }
    
    $file = dirname(__DIR__) . "/classes/{$relative}.php";
    if (is_file($file) && is_readable($file)) {
        require_once $file;
    }
}, true, true);


/*
    Load settings
*/
require_once __DIR__ . '/DefaultSettings.php';


/*
    Update daemon: the file log captures everything (DEBUG and up), the
    console stays quieter (INFO and up)
*/
Log::setLogFile(__DIR__ . '/log_update-wikimedia-commons.log');
Log::setLogLevel(Log::DEBUG);
Log::setConsoleLogLevel(Log::INFO);


/*
    Print what is this program
*/
// Consistency: use the imported Log alias (see the `use` block above)
// rather than the relative PlatformTools\Log name -- under the Desktopd
// namespace both resolve to the same class.
Log::log("Starting: $desktopdProductName, version $desktopdVersion", Log::INFO);
Log::log("Module: Wikimedia Commons Feed Updater", Log::INFO);


$errorsToAbort = $pictureServerErrorsToAbort;

try {
    $daemon = new PlatformTools\DaemonTools('desktopd-server-update', $errorsToAbort);
    $daemon->downloader = new PictureServer\Downloader();
} catch (\Exception $e) {
    Log::log("Exception: $e", Log::CRITICAL);
    // Log::WARNING for consistency with every other call site in this file
    // (Log::WARN appeared nowhere else and may not be a defined constant).
    Log::log("Critical ERROR, exiting...", Log::WARNING);
    // Exit non-zero: a bare `exit;` reports success (status 0) to the
    // shell / init system even though this is a critical failure.
    exit(1);
}

$consoleLogLevel = Log::getConsoleLogLevel();
Log::log("Console logging level is $consoleLogLevel", Log::DEBUG);

// loglevel 'INFO' as currently this takes *thousands* of milliseconds
// XXX: some speedup required
// Logged at INFO because initialization currently takes seconds (see the
// speedup note above).
Log::log("Initializing HTTPS Everywhere...", Log::INFO);
$rulesetDir = $pictureServerHTTPSEverywhereRulesetDirectory;
$userRulesetDir = $pictureServerHTTPSEverywhereUserRulesetDirectory;
$daemon->httpsEverywhere = new HTTPSEverywhere\HTTPSEverywhere(
    $rulesetDir,
    $userRulesetDir
);
Log::log("HTTPS Everywhere: Enabled", Log::DEBUG);

// Pull the remaining run parameters out of the settings file.
$feedURIs = $pictureServerWikimediaCommonsFeedURIs;

$max = $pictureServerPictureDirectoryMaxStoredPictures;
$sleepFirst = $pictureServerWikimediaCommonsSleepFirst;
$maxLoop = $pictureServerWikimediaCommonsMaxLoop;


// Per-download status reporting; counts failures across the whole run.
$errorStatusCount = 0;
$daemon->downloader->addStatusListener(function ($uri, $success, $message) use (&$errorStatusCount) {
    if ($success) {
        $status = 'OK';
    } else {
        $errorStatusCount++;
        $status = "FAILED: $errorStatusCount";
    }
    
    Log::log("[Status] ($status) $message: $uri", Log::INFO);
});


/*
    Sleep for a random, exponentially-distributed duration whose mean comes
    from configuration (nominally about 12 hours -- see DefaultSettings).
    Signal interruptions are dispatched and the remaining time is slept.
    Errors in 64bit float numbers should not be a major problem...
*/
$sleep = function () use ($pictureServerWikimediaCommonsMeanSleep) {
    // Handle any signals queued before we go to sleep.
    if (function_exists('pcntl_signal_dispatch')) {
        pcntl_signal_dispatch();
    }
    
    $totalNs = Random::exponentialByMean(1e9 * $pictureServerWikimediaCommonsMeanSleep);
    
    $totalSeconds = $totalNs / 1e9;
    Log::log(sprintf("Sleeping for about %d s ...", round($totalSeconds)), Log::NOTICE);
    
    // Split into whole seconds + leftover nanoseconds for time_nanosleep().
    $sec = floor($totalSeconds);
    $nsec = (int) ($totalNs - ($sec * 1e9));
    
    for (;;) {
        $result = time_nanosleep($sec, $nsec);
        
        // true: slept the full requested duration.
        if ($result === true) {
            break;
        }
        
        // false (or anything malformed): hard error from the sleep call.
        if (!is_array($result) || !isset($result['seconds'], $result['nanoseconds'])) {
            Log::log("Error sleeping!", Log::WARNING);
            break;
        }
        
        // Interrupted by a signal: dispatch it, then sleep the remainder.
        if (function_exists('pcntl_signal_dispatch')) {
            pcntl_signal_dispatch();
        }
        
        Log::log("Interrupted, sleeping again...", Log::NOTICE);
        $sec = $result['seconds'];
        $nsec = $result['nanoseconds'];
    }
};


/*
    Main loop: poll the configured feeds, queue pictures not yet on disk,
    download them into the locked picture directory, then prune old files.
    The number of iterations is bounded by $maxLoop unless that setting is
    disabled (non-numeric, non-positive or infinite).
*/
$loopCount = 0;

/*
    The loop-bound check used to be duplicated at the top and the bottom of
    the main loop; this closure centralises it.  Returns true when the loop
    should terminate.  $verbose enables the informational "still looping"
    messages (only the top-of-loop check emitted them; the bottom-of-loop
    check runs silently, matching the original behaviour).
*/
$maxLoopReached = function ($loopCount, $verbose) use ($maxLoop) {
    if ((!is_numeric($maxLoop)) || $maxLoop <= 0 || is_infinite($maxLoop)) {
        if ($verbose) {
            Log::log("maxLoop is disabled, so I'll loop forever", Log::NOTICE);
        }
        return false;
    }
    
    if ($loopCount < $maxLoop) {
        if ($verbose) {
            Log::log("maxLoop is $maxLoop", Log::INFO);
        }
        return false;
    }
    
    Log::log("Reached maxLoop=$maxLoop, so breaking the main loop!", Log::NOTICE);
    return true;
};

while (true) {
    $daemon->doTasks();
    
    if ($maxLoopReached($loopCount, true)) {
        break;
    }
    
    Log::log("Entering loop #$loopCount ...", Log::INFO);
    
    if ($sleepFirst) {
        Log::log("sleepFirst is enabled, sleeping *now*", Log::NOTICE);
        $sleep();
    } else {
        Log::log("sleepFirst is disabled, NOT sleeping now", Log::NOTICE);
    }
    
    // get the list of available pictures $uriList
    Log::log("Retrieving the list of available pictures...", Log::INFO);
    $daemon->retriever = new PictureServer\CommonsRetriever($daemon->downloader);
    
    // Forward the retriever's internal log messages to our logger.
    $daemon->retriever->addLogger(new CallbackLogger(function ($message, $level) {
        Log::log($message, $level);
    }));
    
    foreach ($feedURIs as $feedURI) {
        // Try upgrading the feed URI to HTTPS first.
        $rewritten = $daemon->httpsEverywhere->rewrite($feedURI);
        if ($rewritten !== $feedURI) {
            $rulesetName = $daemon->httpsEverywhere->getLastRulesetName();
            Log::log("HTTPS Everywhere Enabled: $rulesetName", Log::NOTICE);
            Log::log("HTTPS Everywhere: Original URI: $feedURI", Log::DEBUG);
            Log::log("HTTPS Everywhere: Rewritten URI: $rewritten", Log::DEBUG);
            $feedURI = $rewritten;
        }
        
        unset($rewritten);
        
        Log::log("Sniffing a feed URI: $feedURI", Log::INFO);
        // sniffFeed() presumably returns a picture count, or false on
        // failure (it is interpolated as a number below) -- TODO confirm.
        $success = $daemon->retriever->sniffFeed($feedURI);
        if ($success === false) {
            Log::log("Could not get the image URI(s) from: $feedURI", Log::ERROR);
        } else {
            $host = $daemon->downloader->getLastHost();
            $cipher = $daemon->downloader->getLastCipher();
            // Falsy or non-array cipher info means no TLS (or unknown);
            // note an empty array is also reported as unknown on purpose.
            $cipher = $cipher && is_array($cipher) ?
                implode(', ', $cipher)
                : '(No TLS / unknown)';
            
            Log::log("$host: Connection using $cipher", Log::INFO);
            Log::log("Found $success picture(s)", Log::DEBUG);
        }
    }
    
    $count = count($daemon->retriever);
    Log::log("$count picture(s) listed!", Log::INFO);
    
    $uriList = $daemon->retriever->getURIs();
    // $daemon->retriever stays alive: the metadata loop below still needs it.
    
    
    /* Initialize and lock the picture directory */
    $daemon->directory = new PictureServer\PictureDirectory($pictureServerPictureDirectory);
    Log::log("Initialized the picture directory: {$daemon->directory}", Log::INFO);
    
    
    // Track the previous ruleset/host so repeated announcements can be
    // demoted to DEBUG / skipped.
    $lastRulesetName = $lastHost = false;
    $alreadyDownloaded = 0;
    foreach ($uriList as $uriListKey => $uri) {
        /*
            Rewrite a URI with HTTPS Everywhere
            TODO: HTTPS Everywhere has no effect when cURL handles HTTP redirects
        */
        
        $rewritten = $daemon->httpsEverywhere->rewrite($uri);
        if ($rewritten !== $uri) {
            $rulesetName = $daemon->httpsEverywhere->getLastRulesetName();
            // Announce a ruleset loudly only when it differs from the last one.
            if ($rulesetName !== $lastRulesetName) {
                Log::log("HTTPS Everywhere Enabled: $rulesetName", Log::NOTICE);
            } else {
                Log::log("HTTPS Everywhere Enabled: $rulesetName", Log::DEBUG);
            }
            
            $lastRulesetName = $rulesetName;
            
            Log::log("HTTPS Everywhere: Original URI: $uri", Log::DEBUG2);
            Log::log("HTTPS Everywhere: Rewritten URI: $rewritten", Log::DEBUG2);
            $uri = $rewritten;
        } else {
            $lastRulesetName = false;
        }
        
        unset($rewritten, $rulesetName);
        
        /*
            Look in the directory for the picture
        */
        if (false === $daemon->directory->getPath($uri)) {
            Log::log("New file: $uri", Log::DEBUG2);
            if (false === $daemon->downloader->addURI($uri)) {
                Log::log("Could not add to the download list: $uri", Log::ERROR);
            } else {
                $host = $daemon->downloader->getLastHost();
                if ($host !== $lastHost) {
                    $cipher = $daemon->downloader->getLastCipher();
                    $cipher = $cipher && is_array($cipher) ?
                        implode(', ', $cipher)
                        : '(No TLS / unknown)';
                    
                    Log::log("$host: Connection using $cipher", Log::INFO);
                    $lastHost = $host;
                }
                
                Log::log("Added to the download list: $uri", Log::INFO);
                
            }
        } else {
            Log::log("Already downloaded: $uri", Log::DEBUG);
            $alreadyDownloaded++;
            // Drop it from $uriList so the exit handler below never treats a
            // pre-existing file as an unfinished download.
            unset($uriList[$uriListKey]);
        }
        
        // Fetch and store the JSON metadata sub-stream if it is missing.
        $type = PictureServer\PictureDirectory::SUBSTREAM_TYPE_JSON;
        if (false === $daemon->directory->subStreamExists($uri, 'metadata', $type)) {
            // fetch metadata
            Log::log("Fetching metadata using API: $uri", Log::DEBUG);
            $metadata = $daemon->retriever->downloadMetadata($uri);
            if (isset($metadata['api'])) {
                Log::log("Metadata downloaded from: {$metadata['api']}", Log::INFO);
                $metadata = json_encode($metadata, JSON_UNESCAPED_SLASHES);
                if ($daemon->directory->writeSubstream($metadata, $uri, 'metadata', $type)) {
                    Log::log("Metadata is successfully written on disk", Log::DEBUG);
                } else {
                    Log::log("Failed to write metadata on disk", Log::WARNING);
                }
            } else {
                Log::log("Failed to get metadata using API", Log::WARNING);
            }
        } else {
            Log::log("Metadata is already available on disk", Log::DEBUG);
        }
    }
    
    unset($daemon->retriever);
    
    Log::log("Skipped: $alreadyDownloaded file(s)", Log::INFO);
    
    
    // download
    $count = $daemon->downloader->count();
    Log::log("Downloading $count file(s) ...", Log::INFO);
    
    // TODO: show progress (print finished downloads) -> [done?]
    // TODO: make downloads atomic (purge failed downloads)
    // so that only successfully downloaded files are listed -> [partial?]
    // TODO: Download description pages (and license notices)
    // and make them available to clients (in JSON?) -> [done?]
    
    // On abnormal exit during the fetch, purge partially-downloaded files so
    // the directory only ever lists complete pictures.
    $exitHandler = function () use ($uriList, $daemon) {
        $done = $daemon->downloader->getDoneURIs();
        foreach ($uriList as $uri) {
            if (in_array($uri, $done, true)) {
                // already downloaded
                continue;
            }
            
            if ($daemon->directory->deleteData($uri)) {
                Log::log("Removed a unfinished download: $uri", Log::NOTICE);
            }
        }
    };
    
    if ($count > 0) {
        $daemon->addExitHandler($exitHandler);
        
        $timer = new Timer();
        $failed = $daemon->downloader->fetch($daemon->directory);
        $time = $timer->getTime();
        
        $daemon->removeExitHandler($exitHandler);
        if ($failed === false) {
            Log::log("Unknown error downloading files", Log::ERROR);
        } elseif (is_array($failed) && 0 < count($failed)) {
            $count = count($failed);
            Log::log("Failed to download $count file(s)", Log::ERROR);
            foreach ($failed as $failedURI) {
                Log::log("Download failure: $failedURI", Log::ERROR);
            }
        } else {
            Log::log("Downloaded $count file(s) in $time s...", Log::INFO);
        }
        
        unset($timer, $time);
    }
    
    
    // clean old files
    Log::log("Cleaning old files ...", Log::INFO);
    
    $cleaned = $daemon->directory->cleanOldFiles($max);
    if ($cleaned === false) {
        Log::log("Error cleaning files", Log::ERROR);
    } else {
        Log::log("Removed URIs: $cleaned", Log::NOTICE);
    }
    
    /*
        Unlocking the directory
    */
    Log::log("Unlocking the picture directory: {$daemon->directory}", Log::INFO);
    unset($daemon->directory);
    
    
    $loopCount++;
    // Silent re-check of the loop bound before (possibly) sleeping again.
    if ($maxLoopReached($loopCount, false)) {
        break;
    }
    
    
    if (!$sleepFirst) {
        Log::log("sleepFirst is disabled, so sleeping *now*", Log::NOTICE);
        $sleep();
    }
    
}


// vim: ts=4 et ai

