/*
 * Copyright [2021-present] [ahoo wang <ahoowang@qq.com> (https://github.com/Ahoo-Wang)].
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *      http://www.apache.org/licenses/LICENSE-2.0
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

import {
  BaseURLCapable,
  ContentTypeValues,
  FetchExchange,
  NamedFetcher,
  REQUEST_BODY_INTERCEPTOR_ORDER,
  RequestInterceptor,
  ResultExtractors,
} from '@ahoo-wang/fetcher';
import {
  api,
  autoGeneratedError,
  body,
  post,
} from '@ahoo-wang/fetcher-decorator';
import '@ahoo-wang/fetcher-eventstream';
import { ChatRequest, ChatResponse } from './types';
import {
  JsonEventStreamResultExtractor,
  JsonServerSentEventStream,
} from '@ahoo-wang/fetcher-eventstream';

/** Registry name under which the LLM fetcher is created and later resolved by {@link LlmClient}. */
export const llmFetcherName = 'llm';

/**
 * Configuration for connecting to an OpenAI-compatible LLM endpoint.
 *
 * Extends {@link BaseURLCapable}, so it also carries the `baseURL` of the service.
 */
export interface LlmOptions extends BaseURLCapable {
  /** API key sent as a `Bearer` token in the `Authorization` header. */
  apiKey: string;
  /** Default model name applied to requests that do not specify one. */
  model?: string;
}

/**
 * Request interceptor that fills in the default model on outgoing chat requests.
 *
 * Runs just before the request-body serialization interceptor
 * (order = REQUEST_BODY_INTERCEPTOR_ORDER - 1) so the mutation happens while
 * the body is still a plain {@link ChatRequest} object.
 */
export class LlmRequestInterceptor implements RequestInterceptor {
  readonly name: string = 'LlmRequestInterceptor';
  readonly order: number = REQUEST_BODY_INTERCEPTOR_ORDER - 1;

  constructor(private readonly llmOptions: LlmOptions) {
  }

  /**
   * Applies the configured default model when the request body omits one.
   *
   * @param exchange - the in-flight exchange whose request body may be mutated
   */
  intercept(exchange: FetchExchange): void {
    const chatRequest = exchange.request.body as ChatRequest | undefined;
    // Guard: the body may be absent or already serialized (e.g. a string);
    // only patch plain object bodies, otherwise reading `.model` would throw.
    if (!chatRequest || typeof chatRequest !== 'object') {
      return;
    }
    if (!chatRequest.model) {
      chatRequest.model = this.llmOptions.model;
    }
  }
}

export function createLlmFetcher(options: LlmOptions): NamedFetcher {
  const llmFetcher = new NamedFetcher(llmFetcherName, {
    baseURL: options.baseURL,
    headers: {
      Authorization: `Bearer ${options.apiKey}`,
      'Content-Type': ContentTypeValues.APPLICATION_JSON,
    },
  });
  llmFetcher.interceptors.request.use(new LlmRequestInterceptor(options));
  return llmFetcher;
}

/**
 * Decorator-generated client for the `/chat` API, bound to the fetcher
 * registered under {@link llmFetcherName}.
 *
 * Method bodies only throw {@link autoGeneratedError}; the actual HTTP call is
 * synthesized by the `@api`/`@post` decorators at runtime.
 */
@api('/chat', {
  fetcher: llmFetcherName,
  resultExtractor: JsonEventStreamResultExtractor,
})
export class LlmClient {
  /**
   * Streams chat completions as JSON server-sent events
   * (uses the class-level {@link JsonEventStreamResultExtractor}).
   *
   * @param body - the chat request payload
   * @returns a stream of parsed {@link ChatResponse} events
   */
  @post('/completions')
  streamChat(
    @body() body: ChatRequest,
  ): Promise<JsonServerSentEventStream<ChatResponse>> {
    throw autoGeneratedError(body);
  }

  /**
   * Performs a non-streaming chat completion; overrides the class-level
   * extractor to parse the full response body as JSON.
   *
   * @param body - the chat request payload
   * @returns the complete {@link ChatResponse}
   */
  @post('/completions', { resultExtractor: ResultExtractors.Json })
  chat(@body() body: ChatRequest): Promise<ChatResponse> {
    throw autoGeneratedError(body);
  }
}
