package pers.xsb.jhipster.search2.web.rest;

import pers.xsb.jhipster.search2.domain.CrawlerPage;
import pers.xsb.jhipster.search2.repository.CrawlerPageRepository;
import pers.xsb.jhipster.search2.repository.search.CrawlerPageSearchRepository;
import pers.xsb.jhipster.search2.web.rest.errors.BadRequestAlertException;

import io.github.jhipster.web.util.HeaderUtil;
import io.github.jhipster.web.util.PaginationUtil;
import io.github.jhipster.web.util.ResponseUtil;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.Pageable;
import org.springframework.http.HttpHeaders;
import org.springframework.http.HttpStatus;
import org.springframework.web.servlet.support.ServletUriComponentsBuilder;
import org.springframework.http.ResponseEntity;
import org.springframework.transaction.annotation.Transactional;
import org.springframework.web.bind.annotation.*;

import java.net.URI;
import java.net.URISyntaxException;
import java.util.List;
import java.util.Optional;
import java.util.stream.Collectors;
import java.util.stream.StreamSupport;

import static org.elasticsearch.index.query.QueryBuilders.*;

/**
 * REST controller for managing {@link pers.xsb.jhipster.search2.domain.CrawlerPage}.
 */
@RestController
@RequestMapping("/api")
@Transactional
public class CrawlerPageResource {

    // SLF4J convention: one static final logger per class, not one per instance.
    private static final Logger log = LoggerFactory.getLogger(CrawlerPageResource.class);

    private static final String ENTITY_NAME = "crawlerPage";

    @Value("${jhipster.clientApp.name}")
    private String applicationName;

    private final CrawlerPageRepository crawlerPageRepository;

    private final CrawlerPageSearchRepository crawlerPageSearchRepository;

    public CrawlerPageResource(CrawlerPageRepository crawlerPageRepository, CrawlerPageSearchRepository crawlerPageSearchRepository) {
        this.crawlerPageRepository = crawlerPageRepository;
        this.crawlerPageSearchRepository = crawlerPageSearchRepository;
    }

    /**
     * {@code POST  /crawler-pages} : Create a new crawlerPage.
     *
     * Persists the entity to the database, then mirrors it into the
     * Elasticsearch index so it becomes searchable.
     *
     * @param crawlerPage the crawlerPage to create.
     * @return the {@link ResponseEntity} with status {@code 201 (Created)} and with body the new crawlerPage, or with status {@code 400 (Bad Request)} if the crawlerPage has already an ID.
     * @throws URISyntaxException if the Location URI syntax is incorrect.
     */
    @PostMapping("/crawler-pages")
    public ResponseEntity<CrawlerPage> createCrawlerPage(@RequestBody CrawlerPage crawlerPage) throws URISyntaxException {
        log.debug("REST request to save CrawlerPage : {}", crawlerPage);
        if (crawlerPage.getId() != null) {
            // A client supplying an id on create would silently overwrite an existing row.
            throw new BadRequestAlertException("A new crawlerPage cannot already have an ID", ENTITY_NAME, "idexists");
        }
        CrawlerPage result = crawlerPageRepository.save(crawlerPage);
        // Keep the search index in sync with the database write.
        crawlerPageSearchRepository.save(result);
        return ResponseEntity.created(new URI("/api/crawler-pages/" + result.getId()))
            .headers(HeaderUtil.createEntityCreationAlert(applicationName, true, ENTITY_NAME, result.getId().toString()))
            .body(result);
    }

    /**
     * {@code PUT  /crawler-pages} : Updates an existing crawlerPage.
     *
     * The database row and the Elasticsearch document are both replaced
     * with the submitted entity.
     *
     * @param crawlerPage the crawlerPage to update.
     * @return the {@link ResponseEntity} with status {@code 200 (OK)} and with body the updated crawlerPage,
     * or with status {@code 400 (Bad Request)} if the crawlerPage is not valid,
     * or with status {@code 500 (Internal Server Error)} if the crawlerPage couldn't be updated.
     * @throws URISyntaxException declared for signature compatibility; no URI is built here.
     */
    @PutMapping("/crawler-pages")
    public ResponseEntity<CrawlerPage> updateCrawlerPage(@RequestBody CrawlerPage crawlerPage) throws URISyntaxException {
        log.debug("REST request to update CrawlerPage : {}", crawlerPage);
        if (crawlerPage.getId() == null) {
            // Updating without an id is ambiguous; reject rather than create.
            throw new BadRequestAlertException("Invalid id", ENTITY_NAME, "idnull");
        }
        CrawlerPage result = crawlerPageRepository.save(crawlerPage);
        // Keep the search index in sync with the database write.
        crawlerPageSearchRepository.save(result);
        return ResponseEntity.ok()
            .headers(HeaderUtil.createEntityUpdateAlert(applicationName, true, ENTITY_NAME, crawlerPage.getId().toString()))
            .body(result);
    }

    /**
     * {@code GET  /crawler-pages} : get all the crawlerPages.
     *
     * @param pageable the pagination information.
     * @return the {@link ResponseEntity} with status {@code 200 (OK)} and the list of crawlerPages in body;
     * pagination metadata (total count, links) is carried in the response headers.
     */
    @GetMapping("/crawler-pages")
    public ResponseEntity<List<CrawlerPage>> getAllCrawlerPages(Pageable pageable) {
        log.debug("REST request to get a page of CrawlerPages");
        Page<CrawlerPage> page = crawlerPageRepository.findAll(pageable);
        HttpHeaders headers = PaginationUtil.generatePaginationHttpHeaders(ServletUriComponentsBuilder.fromCurrentRequest(), page);
        return ResponseEntity.ok().headers(headers).body(page.getContent());
    }

    /**
     * {@code GET  /crawler-pages/:id} : get the "id" crawlerPage.
     *
     * @param id the id of the crawlerPage to retrieve.
     * @return the {@link ResponseEntity} with status {@code 200 (OK)} and with body the crawlerPage, or with status {@code 404 (Not Found)}.
     */
    @GetMapping("/crawler-pages/{id}")
    public ResponseEntity<CrawlerPage> getCrawlerPage(@PathVariable Long id) {
        log.debug("REST request to get CrawlerPage : {}", id);
        Optional<CrawlerPage> crawlerPage = crawlerPageRepository.findById(id);
        return ResponseUtil.wrapOrNotFound(crawlerPage);
    }

    /**
     * {@code DELETE  /crawler-pages/:id} : delete the "id" crawlerPage.
     *
     * Removes the entity from both the database and the search index.
     *
     * @param id the id of the crawlerPage to delete.
     * @return the {@link ResponseEntity} with status {@code 204 (NO_CONTENT)}.
     */
    @DeleteMapping("/crawler-pages/{id}")
    public ResponseEntity<Void> deleteCrawlerPage(@PathVariable Long id) {
        log.debug("REST request to delete CrawlerPage : {}", id);
        crawlerPageRepository.deleteById(id);
        // Mirror the delete into the search index so stale documents are not returned.
        crawlerPageSearchRepository.deleteById(id);
        return ResponseEntity.noContent().headers(HeaderUtil.createEntityDeletionAlert(applicationName, true, ENTITY_NAME, id.toString())).build();
    }

    /**
     * {@code SEARCH  /_search/crawler-pages?query=:query} : search for the crawlerPage corresponding
     * to the query.
     *
     * @param query the Elasticsearch query-string expression to search with.
     * @param pageable the pagination information.
     * @return the result of the search, with pagination metadata in the response headers.
     */
    @GetMapping("/_search/crawler-pages")
    public ResponseEntity<List<CrawlerPage>> searchCrawlerPages(@RequestParam String query, Pageable pageable) {
        log.debug("REST request to search for a page of CrawlerPages for query {}", query);
        Page<CrawlerPage> page = crawlerPageSearchRepository.search(queryStringQuery(query), pageable);
        HttpHeaders headers = PaginationUtil.generatePaginationHttpHeaders(ServletUriComponentsBuilder.fromCurrentRequest(), page);
        return ResponseEntity.ok().headers(headers).body(page.getContent());
    }
}
