from clarifai_grpc.channel.clarifai_channel import ClarifaiChannel
from clarifai_grpc.grpc.api import resources_pb2, service_pb2, service_pb2_grpc
from clarifai_grpc.grpc.api.status import status_code_pb2

from global_config import GlobalConfig
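# NOTE: `global_config.GlobalConfig` is not shown in this file. A minimal sketch of the
# attributes this script assumes it provides (placeholder values below are assumptions,
# not part of the original project) might look like:
#
#     class GlobalConfig:
#         CLARIFAI_PAT = '<Clarifai personal access token>'
#         CLARIFAI_USER_ID = '<Clarifai user ID>'
#         CLARIFAI_APP_ID = '<Clarifai app ID>'
#         CLARIFAI_MODEL_ID = '<Clarifai model ID to query>'
#         SLIDES_TEMPLATE_FILE = '<path to the prompt template file>'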


# gRPC channel and stub for the Clarifai API
CHANNEL = ClarifaiChannel.get_grpc_channel()
STUB = service_pb2_grpc.V2Stub(CHANNEL)

# Every request is authenticated with the Personal Access Token (PAT)
METADATA = (
    ('authorization', 'Key ' + GlobalConfig.CLARIFAI_PAT),
)

# Identifies the Clarifai user and app that own the model being queried
USER_DATA_OBJECT = resources_pb2.UserAppIDSet(
    user_id=GlobalConfig.CLARIFAI_USER_ID,
    app_id=GlobalConfig.CLARIFAI_APP_ID
)

# Example prompt (unused below; the __main__ block builds its prompt from the template file)
RAW_TEXT = '''You are a helpful, intelligent chatbot. Create the slides for a presentation on the given topic. Include main headings for each slide, detailed bullet points for each slide. Add relevant content to each slide. Do not output any blank line.

Topic:
Talk about AI, covering what it is and how it works. Add its pros, cons, and future prospects. Also, cover its job prospects.
'''


def get_text_from_llm(prompt: str) -> str:
    """Send the prompt to the configured Clarifai model and return the generated text."""
    post_model_outputs_response = STUB.PostModelOutputs(
        service_pb2.PostModelOutputsRequest(
            user_app_id=USER_DATA_OBJECT,  # The userDataObject is created in the overview and is required when using a PAT
            model_id=GlobalConfig.CLARIFAI_MODEL_ID,
            # version_id=MODEL_VERSION_ID,  # This is optional. Defaults to the latest model version
            inputs=[
                resources_pb2.Input(
                    data=resources_pb2.Data(
                        text=resources_pb2.Text(
                            raw=prompt
                        )
                    )
                )
            ]
        ),
        metadata=METADATA
    )

    if post_model_outputs_response.status.code != status_code_pb2.SUCCESS:
        print(post_model_outputs_response.status)
        raise Exception(f"Post model outputs failed, status: {post_model_outputs_response.status.description}")

    # Since we have one input, one output will exist here
    output = post_model_outputs_response.outputs[0]

    # print("Completion:\n")
    # print(output.data.text.raw)

    return output.data.text.raw


if __name__ == '__main__':
    topic = ('Talk about AI, covering what it is and how it works.'
             ' Add its pros, cons, and future prospects.'
             ' Also, cover its job prospects.'
             )
    print(topic)

    # Read the slides prompt template, fill in the topic, and query the LLM
    with open(GlobalConfig.SLIDES_TEMPLATE_FILE, 'r') as in_file:
        prompt_txt = in_file.read()

    prompt_txt = prompt_txt.replace('{topic}', topic)
    response_txt = get_text_from_llm(prompt_txt)

    print('Output:\n', response_txt)