Commit
•
be5264f
1
Parent(s):
5579546
Update unreferenced local variables
Browse files
chat_interface_preference.py
CHANGED
@@ -647,6 +647,8 @@ class ChatInterface(Blocks):
|
|
647 |
first_response = await async_iteration(generator)
|
648 |
if n_generations == 2:
|
649 |
first_response_formatted = self._get_chat_message_comparison(first_response, "")
|
|
|
|
|
650 |
if self.multimodal and isinstance(message, dict):
|
651 |
for x in message["files"]:
|
652 |
history.append([(x,), None])
|
@@ -665,11 +667,13 @@ class ChatInterface(Blocks):
|
|
665 |
async for response in generator:
|
666 |
if n_generations == 2:
|
667 |
response_formatted = self._get_chat_message_comparison(response, "")
|
|
|
|
|
668 |
if self.multimodal and isinstance(message, dict):
|
669 |
-
update = history + [[message["text"], response]]
|
670 |
yield update, update
|
671 |
else:
|
672 |
-
update = history + [[message, response]]
|
673 |
yield update, update
|
674 |
|
675 |
if n_generations == 2:
|
@@ -680,15 +684,15 @@ class ChatInterface(Blocks):
|
|
680 |
generator_two = SyncToAsyncIterator(generator, self.limiter)
|
681 |
try:
|
682 |
first_response_two = await async_iteration(generator_two)
|
683 |
-
|
684 |
if self.multimodal and isinstance(message, dict):
|
685 |
for x in message["files"]:
|
686 |
history.append([(x,), None])
|
687 |
|
688 |
-
update = history + [[message["text"], first_response_two]]
|
689 |
yield update, update
|
690 |
else:
|
691 |
-
update = history + [[message, first_response_two]]
|
692 |
yield update, update
|
693 |
except StopIteration:
|
694 |
if self.multimodal and isinstance(message, dict):
|
|
|
647 |
first_response = await async_iteration(generator)
|
648 |
if n_generations == 2:
|
649 |
first_response_formatted = self._get_chat_message_comparison(first_response, "")
|
650 |
+
else:
|
651 |
+
first_response_formatted = first_response
|
652 |
if self.multimodal and isinstance(message, dict):
|
653 |
for x in message["files"]:
|
654 |
history.append([(x,), None])
|
|
|
667 |
async for response in generator:
|
668 |
if n_generations == 2:
|
669 |
response_formatted = self._get_chat_message_comparison(response, "")
|
670 |
+
else:
|
671 |
+
response_formatted = response
|
672 |
if self.multimodal and isinstance(message, dict):
|
673 |
+
update = history + [[message["text"], response_formatted]]
|
674 |
yield update, update
|
675 |
else:
|
676 |
+
update = history + [[message, response_formatted]]
|
677 |
yield update, update
|
678 |
|
679 |
if n_generations == 2:
|
|
|
684 |
generator_two = SyncToAsyncIterator(generator, self.limiter)
|
685 |
try:
|
686 |
first_response_two = await async_iteration(generator_two)
|
687 |
+
first_response_two_formatted = self._get_chat_message_comparison(response, first_response_two)
|
688 |
if self.multimodal and isinstance(message, dict):
|
689 |
for x in message["files"]:
|
690 |
history.append([(x,), None])
|
691 |
|
692 |
+
update = history + [[message["text"], first_response_two_formatted]]
|
693 |
yield update, update
|
694 |
else:
|
695 |
+
update = history + [[message, first_response_two_formatted]]
|
696 |
yield update, update
|
697 |
except StopIteration:
|
698 |
if self.multimodal and isinstance(message, dict):
|