package pers.xsb.jhipster.search2.web.rest;

import pers.xsb.jhipster.search2.Search2App;
import pers.xsb.jhipster.search2.domain.CrawlerPage;
import pers.xsb.jhipster.search2.repository.CrawlerPageRepository;
import pers.xsb.jhipster.search2.repository.search.CrawlerPageSearchRepository;

import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.mockito.Mock;
import org.junit.jupiter.api.extension.ExtendWith;
import org.mockito.junit.jupiter.MockitoExtension;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.autoconfigure.web.servlet.AutoConfigureMockMvc;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.data.domain.PageImpl;
import org.springframework.data.domain.PageRequest;
import org.springframework.http.MediaType;
import org.springframework.security.test.context.support.WithMockUser;
import org.springframework.test.web.servlet.MockMvc;
import org.springframework.transaction.annotation.Transactional;
import org.springframework.util.Base64Utils;
import javax.persistence.EntityManager;
import java.util.Collections;
import java.util.List;

import static org.assertj.core.api.Assertions.assertThat;
import static org.elasticsearch.index.query.QueryBuilders.queryStringQuery;
import static org.hamcrest.Matchers.hasItem;
import static org.springframework.security.test.web.servlet.request.SecurityMockMvcRequestPostProcessors.csrf;
import static org.mockito.Mockito.*;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.*;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.*;

/**
 * Integration tests for the {@link CrawlerPageResource} REST controller.
 *
 * <p>Boots the full application context ({@link Search2App}) with a mocked
 * Elasticsearch search repository, and exercises the CRUD + search endpoints
 * through {@link MockMvc}. Each test runs in a rolled-back transaction.
 */
@SpringBootTest(classes = Search2App.class)
@ExtendWith(MockitoExtension.class)
@AutoConfigureMockMvc
@WithMockUser
public class CrawlerPageResourceIT {

    private static final String DEFAULT_URL = "AAAAAAAAAA";
    private static final String UPDATED_URL = "BBBBBBBBBB";

    private static final String DEFAULT_MD_5 = "AAAAAAAAAA";
    private static final String UPDATED_MD_5 = "BBBBBBBBBB";

    private static final String DEFAULT_TITLE = "AAAAAAAAAA";
    private static final String UPDATED_TITLE = "BBBBBBBBBB";

    private static final String DEFAULT_SOURCE = "AAAAAAAAAA";
    private static final String UPDATED_SOURCE = "BBBBBBBBBB";

    @Autowired
    private CrawlerPageRepository crawlerPageRepository;

    /**
     * This repository is mocked in the pers.xsb.jhipster.search2.repository.search test package.
     *
     * @see pers.xsb.jhipster.search2.repository.search.CrawlerPageSearchRepositoryMockConfiguration
     */
    @Autowired
    private CrawlerPageSearchRepository mockCrawlerPageSearchRepository;

    @Autowired
    private EntityManager em;

    @Autowired
    private MockMvc restCrawlerPageMockMvc;

    // Fresh entity created before each test by initTest(); never persisted here.
    private CrawlerPage crawlerPage;

    /**
     * Create an entity for this test.
     *
     * This is a static method, as tests for other entities might also need it,
     * if they test an entity which requires the current entity.
     *
     * @param em entity manager, unused here but kept for signature consistency
     *           with entities that need to persist required relationships
     * @return a transient {@link CrawlerPage} populated with the DEFAULT_* values
     */
    public static CrawlerPage createEntity(EntityManager em) {
        CrawlerPage crawlerPage = new CrawlerPage()
            .url(DEFAULT_URL)
            .md5(DEFAULT_MD_5)
            .title(DEFAULT_TITLE)
            .source(DEFAULT_SOURCE);
        return crawlerPage;
    }
    /**
     * Create an updated entity for this test.
     *
     * This is a static method, as tests for other entities might also need it,
     * if they test an entity which requires the current entity.
     *
     * @param em entity manager, unused here but kept for signature consistency
     * @return a transient {@link CrawlerPage} populated with the UPDATED_* values
     */
    public static CrawlerPage createUpdatedEntity(EntityManager em) {
        CrawlerPage crawlerPage = new CrawlerPage()
            .url(UPDATED_URL)
            .md5(UPDATED_MD_5)
            .title(UPDATED_TITLE)
            .source(UPDATED_SOURCE);
        return crawlerPage;
    }

    @BeforeEach
    public void initTest() {
        crawlerPage = createEntity(em);
    }

    /**
     * POST /api/crawler-pages with a new entity must return 201, persist the
     * entity with the submitted field values, and index it in Elasticsearch.
     */
    @Test
    @Transactional
    public void createCrawlerPage() throws Exception {
        int databaseSizeBeforeCreate = crawlerPageRepository.findAll().size();
        // Create the CrawlerPage
        restCrawlerPageMockMvc.perform(post("/api/crawler-pages").with(csrf())
            .contentType(MediaType.APPLICATION_JSON)
            .content(TestUtil.convertObjectToJsonBytes(crawlerPage)))
            .andExpect(status().isCreated());

        // Validate the CrawlerPage in the database
        List<CrawlerPage> crawlerPageList = crawlerPageRepository.findAll();
        assertThat(crawlerPageList).hasSize(databaseSizeBeforeCreate + 1);
        CrawlerPage testCrawlerPage = crawlerPageList.get(crawlerPageList.size() - 1);
        assertThat(testCrawlerPage.getUrl()).isEqualTo(DEFAULT_URL);
        assertThat(testCrawlerPage.getMd5()).isEqualTo(DEFAULT_MD_5);
        assertThat(testCrawlerPage.getTitle()).isEqualTo(DEFAULT_TITLE);
        assertThat(testCrawlerPage.getSource()).isEqualTo(DEFAULT_SOURCE);

        // Validate the CrawlerPage in Elasticsearch
        verify(mockCrawlerPageSearchRepository, times(1)).save(testCrawlerPage);
    }

    /**
     * POST /api/crawler-pages with a pre-set ID must return 400 and leave both
     * the database and the search index untouched.
     */
    @Test
    @Transactional
    public void createCrawlerPageWithExistingId() throws Exception {
        int databaseSizeBeforeCreate = crawlerPageRepository.findAll().size();

        // Create the CrawlerPage with an existing ID
        crawlerPage.setId(1L);

        // An entity with an existing ID cannot be created, so this API call must fail
        restCrawlerPageMockMvc.perform(post("/api/crawler-pages").with(csrf())
            .contentType(MediaType.APPLICATION_JSON)
            .content(TestUtil.convertObjectToJsonBytes(crawlerPage)))
            .andExpect(status().isBadRequest());

        // Validate the CrawlerPage in the database
        List<CrawlerPage> crawlerPageList = crawlerPageRepository.findAll();
        assertThat(crawlerPageList).hasSize(databaseSizeBeforeCreate);

        // Validate the CrawlerPage in Elasticsearch: never() is the idiomatic
        // Mockito form of times(0)
        verify(mockCrawlerPageSearchRepository, never()).save(crawlerPage);
    }


    /**
     * GET /api/crawler-pages must return 200 with a JSON array containing the
     * persisted entity's fields.
     */
    @Test
    @Transactional
    public void getAllCrawlerPages() throws Exception {
        // Initialize the database
        crawlerPageRepository.saveAndFlush(crawlerPage);

        // Get all the crawlerPageList
        restCrawlerPageMockMvc.perform(get("/api/crawler-pages?sort=id,desc"))
            .andExpect(status().isOk())
            .andExpect(content().contentType(MediaType.APPLICATION_JSON_VALUE))
            .andExpect(jsonPath("$.[*].id").value(hasItem(crawlerPage.getId().intValue())))
            .andExpect(jsonPath("$.[*].url").value(hasItem(DEFAULT_URL)))
            .andExpect(jsonPath("$.[*].md5").value(hasItem(DEFAULT_MD_5)))
            .andExpect(jsonPath("$.[*].title").value(hasItem(DEFAULT_TITLE)))
            // DEFAULT_SOURCE is already a String; no toString() needed
            .andExpect(jsonPath("$.[*].source").value(hasItem(DEFAULT_SOURCE)));
    }
    
    /**
     * GET /api/crawler-pages/{id} must return 200 with the entity's fields.
     */
    @Test
    @Transactional
    public void getCrawlerPage() throws Exception {
        // Initialize the database
        crawlerPageRepository.saveAndFlush(crawlerPage);

        // Get the crawlerPage
        restCrawlerPageMockMvc.perform(get("/api/crawler-pages/{id}", crawlerPage.getId()))
            .andExpect(status().isOk())
            .andExpect(content().contentType(MediaType.APPLICATION_JSON_VALUE))
            .andExpect(jsonPath("$.id").value(crawlerPage.getId().intValue()))
            .andExpect(jsonPath("$.url").value(DEFAULT_URL))
            .andExpect(jsonPath("$.md5").value(DEFAULT_MD_5))
            .andExpect(jsonPath("$.title").value(DEFAULT_TITLE))
            // DEFAULT_SOURCE is already a String; no toString() needed
            .andExpect(jsonPath("$.source").value(DEFAULT_SOURCE));
    }

    /**
     * GET /api/crawler-pages/{id} for an unknown ID must return 404.
     */
    @Test
    @Transactional
    public void getNonExistingCrawlerPage() throws Exception {
        // Get the crawlerPage
        restCrawlerPageMockMvc.perform(get("/api/crawler-pages/{id}", Long.MAX_VALUE))
            .andExpect(status().isNotFound());
    }

    /**
     * PUT /api/crawler-pages with an existing entity must return 200, update
     * the stored fields, and re-index the entity in Elasticsearch.
     */
    @Test
    @Transactional
    public void updateCrawlerPage() throws Exception {
        // Initialize the database
        crawlerPageRepository.saveAndFlush(crawlerPage);

        int databaseSizeBeforeUpdate = crawlerPageRepository.findAll().size();

        // Update the crawlerPage
        CrawlerPage updatedCrawlerPage = crawlerPageRepository.findById(crawlerPage.getId()).get();
        // Disconnect from session so that the updates on updatedCrawlerPage are not directly saved in db
        em.detach(updatedCrawlerPage);
        updatedCrawlerPage
            .url(UPDATED_URL)
            .md5(UPDATED_MD_5)
            .title(UPDATED_TITLE)
            .source(UPDATED_SOURCE);

        restCrawlerPageMockMvc.perform(put("/api/crawler-pages").with(csrf())
            .contentType(MediaType.APPLICATION_JSON)
            .content(TestUtil.convertObjectToJsonBytes(updatedCrawlerPage)))
            .andExpect(status().isOk());

        // Validate the CrawlerPage in the database
        List<CrawlerPage> crawlerPageList = crawlerPageRepository.findAll();
        assertThat(crawlerPageList).hasSize(databaseSizeBeforeUpdate);
        CrawlerPage testCrawlerPage = crawlerPageList.get(crawlerPageList.size() - 1);
        assertThat(testCrawlerPage.getUrl()).isEqualTo(UPDATED_URL);
        assertThat(testCrawlerPage.getMd5()).isEqualTo(UPDATED_MD_5);
        assertThat(testCrawlerPage.getTitle()).isEqualTo(UPDATED_TITLE);
        assertThat(testCrawlerPage.getSource()).isEqualTo(UPDATED_SOURCE);

        // Validate the CrawlerPage in Elasticsearch
        verify(mockCrawlerPageSearchRepository, times(1)).save(testCrawlerPage);
    }

    /**
     * PUT /api/crawler-pages with an entity that has no ID must return 400 and
     * leave both the database and the search index untouched.
     */
    @Test
    @Transactional
    public void updateNonExistingCrawlerPage() throws Exception {
        int databaseSizeBeforeUpdate = crawlerPageRepository.findAll().size();

        // If the entity doesn't have an ID, it will throw BadRequestAlertException
        restCrawlerPageMockMvc.perform(put("/api/crawler-pages").with(csrf())
            .contentType(MediaType.APPLICATION_JSON)
            .content(TestUtil.convertObjectToJsonBytes(crawlerPage)))
            .andExpect(status().isBadRequest());

        // Validate the CrawlerPage in the database
        List<CrawlerPage> crawlerPageList = crawlerPageRepository.findAll();
        assertThat(crawlerPageList).hasSize(databaseSizeBeforeUpdate);

        // Validate the CrawlerPage in Elasticsearch: never() is the idiomatic
        // Mockito form of times(0)
        verify(mockCrawlerPageSearchRepository, never()).save(crawlerPage);
    }

    /**
     * DELETE /api/crawler-pages/{id} must return 204, remove the entity from
     * the database, and remove it from the Elasticsearch index.
     */
    @Test
    @Transactional
    public void deleteCrawlerPage() throws Exception {
        // Initialize the database
        crawlerPageRepository.saveAndFlush(crawlerPage);

        int databaseSizeBeforeDelete = crawlerPageRepository.findAll().size();

        // Delete the crawlerPage
        restCrawlerPageMockMvc.perform(delete("/api/crawler-pages/{id}", crawlerPage.getId()).with(csrf())
            .accept(MediaType.APPLICATION_JSON))
            .andExpect(status().isNoContent());

        // Validate the database contains one less item
        List<CrawlerPage> crawlerPageList = crawlerPageRepository.findAll();
        assertThat(crawlerPageList).hasSize(databaseSizeBeforeDelete - 1);

        // Validate the CrawlerPage in Elasticsearch
        verify(mockCrawlerPageSearchRepository, times(1)).deleteById(crawlerPage.getId());
    }

    /**
     * GET /api/_search/crawler-pages must return 200 with the page supplied by
     * the (mocked) search repository for the given query.
     */
    @Test
    @Transactional
    public void searchCrawlerPage() throws Exception {
        // Configure the mock search repository
        // Initialize the database
        crawlerPageRepository.saveAndFlush(crawlerPage);
        when(mockCrawlerPageSearchRepository.search(queryStringQuery("id:" + crawlerPage.getId()), PageRequest.of(0, 20)))
            .thenReturn(new PageImpl<>(Collections.singletonList(crawlerPage), PageRequest.of(0, 1), 1));

        // Search the crawlerPage
        restCrawlerPageMockMvc.perform(get("/api/_search/crawler-pages?query=id:" + crawlerPage.getId()))
            .andExpect(status().isOk())
            .andExpect(content().contentType(MediaType.APPLICATION_JSON_VALUE))
            .andExpect(jsonPath("$.[*].id").value(hasItem(crawlerPage.getId().intValue())))
            .andExpect(jsonPath("$.[*].url").value(hasItem(DEFAULT_URL)))
            .andExpect(jsonPath("$.[*].md5").value(hasItem(DEFAULT_MD_5)))
            .andExpect(jsonPath("$.[*].title").value(hasItem(DEFAULT_TITLE)))
            // DEFAULT_SOURCE is already a String; no toString() needed
            .andExpect(jsonPath("$.[*].source").value(hasItem(DEFAULT_SOURCE)));
    }
}
