from fastapi import FastAPI, HTTPException, UploadFile, File, Request, BackgroundTasks, Form
from fastapi.middleware.cors import CORSMiddleware
from pydantic import BaseModel
from httpx import Timeout
import httpx
import logging
import base64
import firebase_admin
from firebase_admin import credentials
from firebase_admin import firestore
import json
from datetime import datetime
import pytz

from typing import List, Optional, Dict, Any
from google.oauth2 import service_account
from googleapiclient.discovery import build

from io import BytesIO
import google.generativeai as genai
import os
import requests


from google.cloud import vision
import aiohttp
from typing import Tuple


# start application
app = FastAPI()

# allow all origins
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],  # Allows all origins
    allow_credentials=True,  # Allows credentials (such as cookies) to be sent with requests
    allow_methods=["*"],  # Allows all methods
    allow_headers=["*"],  # Allows all headers
)

# set up logging
logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')
logging.info("Application starting up")

request_timeout = Timeout(1000.0, connect=600.0)

# set up firebase


# Load Firebase configuration from a file
with open('/app/secrets/fb_secret.json', 'r') as f:
    firebase_configuration = json.load(f)

firebase_credentials = credentials.Certificate(firebase_configuration)
firebase_admin.initialize_app(firebase_credentials, {
    'projectId': firebase_configuration['project_id'],
})
firestore_db = firestore.client()
users_collection_ref = firestore_db.collection('users')
transactions_collection_ref = firestore_db.collection('transactions')

SCOPES = ['https://www.googleapis.com/auth/spreadsheets','https://www.googleapis.com/auth/drive']

with open('/app/secrets/openai_key.txt', 'r') as f:
    OPENAI_KEY = f.read()

APICredential = service_account.Credentials.from_service_account_file(
    '/app/secrets/googleapi_secret.json', scopes=SCOPES)

sheet_service = build('sheets', 'v4', credentials=APICredential)
drive_service = build('drive', 'v3', credentials=APICredential)

with open('/app/secrets/gemini_key.txt', 'r') as f:
    api_key = f.read().strip()  # strip a possible trailing newline before passing the key to genai.configure

genai.configure(api_key=api_key)

# Create the model
generation_config = {
  "temperature": 1,
  "top_p": 0.95,
  "top_k": 64,
  "max_output_tokens": 8192,
  "response_mime_type": "application/json",
}

model = genai.GenerativeModel(
  model_name="gemini-1.5-flash",
  generation_config=generation_config,
  # safety_settings = Adjust safety settings
  # See https://ai.google.dev/gemini-api/docs/safety-settings
)



# first three are "๋“ฑ๋ก์ผ์‹œ", "์ธ๋„ค์ผ", "๋ฐ”๋กœ๊ฐ€๊ธฐ", 
receipt_sheet_headers = ['๋ฐœํ–‰์ผ', '์ƒํ˜ธ', '์‚ฌ์—…์ž๋ฒˆํ˜ธ', 'ํ•ฉ๊ณ„๊ธˆ์•ก', '๋‚ด์—ญ', '์นด๋“œ๋ฒˆํ˜ธ', '์นด๋“œ์ข…๋ฅ˜', '์Šน์ธ๋ฒˆํ˜ธ', '๊ธฐํƒ€']
# at index 1 '๋“ฑ๋ก ์ผ์‹œ', at last index '๋ช…ํ•จ๋ณด๊ธฐ'
business_card_sheet_headers = ['์ด๋ฆ„', 'ํšŒ์‚ฌ๋ช…', '์†Œ์† ๋ฐ ์ง์œ„', '์ฃผ์†Œ', '์ „ํ™”' ,'ํœด๋Œ€ํฐ', 'ํŒฉ์Šค','e-mail','ํ™ˆํŽ˜์ด์ง€']
sms_sheet_headers = ['๋‚ ์งœ', '๊ณ„์ขŒ', '์ž…๊ธˆ', '์ถœ๊ธˆ', '์ž”์•ก', '๋ฌธ์ž']
contact_sheet_headers = ['์ด๋ฆ„',	'์ „ํ™”๋ฒˆํ˜ธ1',	'์ „ํ™”๋ฒˆํ˜ธ2',	'์ „ํ™”๋ฒˆํ˜ธ3',	'์ด๋ฉ”์ผ์ฃผ์†Œ1',	'์ด๋ฉ”์ผ์ฃผ์†Œ2',	'์ด๋ฉ”์ผ์ฃผ์†Œ3',	'๊ทธ๋ฃน']

receipts_ss = 1395967745
business_cards_ss = 1733555840
contacts_ss = 1729750548
sms_ss = 1891574758

kst = pytz.timezone('Asia/Seoul')


class SheetUpdateRequest(BaseModel):
    target_sheet_id: int
    data: list

def convert_dicts_to_list(dicts: List[Dict], headers: List[str], add_link: bool = False, 
                          image_id: str = '', link_text: str = '', 
                          link_position: int = -1) -> List[List[str]]:
    rows = []
    # timestamp = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
    timestamp = datetime.now(kst).strftime("%Y-%m-%d %H:%M:%S")
    
    for dict_item in dicts:
        # row = list(dict_item.values())
        # row = [str(value) for value in dict_item.values()]
        row = [str(dict_item.get(header, '')) for header in headers]
        
        # Always add timestamp to the first index
        row.insert(0, timestamp)
        
        if add_link:
            # Ensure the list length is at least 9
            while len(row) < 9:
                row.append('')
            if link_position == -1:  # Append link at the end
                row.append(f'=HYPERLINK("https://drive.google.com/file/d/{image_id}/view?usp=sharing", "{link_text}")')
            else:
                row.insert(link_position, f'=HYPERLINK("https://drive.google.com/file/d/{image_id}/view?usp=sharing", "{link_text}")')
                
        rows.append(row)
    # print(rows)
    return rows
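
# Illustrative call of the helper above (field values are assumptions, not taken from this file):
#   convert_dicts_to_list([{'์ƒํ˜ธ': '์€์„ฑ๋งˆ์ผ“', 'ํ•ฉ๊ณ„๊ธˆ์•ก': '2,800'}], receipt_sheet_headers,
#                         add_link=True, image_id='<drive-file-id>',
#                         link_text='๋ฐ”๋กœ๊ฐ€๊ธฐ', link_position=1)
# returns one row: [<timestamp>, =HYPERLINK(...), '', '์€์„ฑ๋งˆ์ผ“', '', '2,800', '', '', '', '', '']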



async def download_file_from_drive(file_id: str) -> Tuple[bytes, str]:
    """
    Downloads a file from Google Drive using the file ID
    and returns the file content as bytes and the MIME type.
    """
    try:
        # URL for downloading the file from Google Drive
        url = f"https://drive.google.com/uc?export=download&id={file_id}"

        async with aiohttp.ClientSession() as session:
            async with session.get(url) as response:
                if response.status == 200:
                    content = await response.read()
                    mime_type = response.headers.get('Content-Type', 'application/octet-stream')
                    return content, mime_type
                else:
                    raise HTTPException(status_code=500, detail=f"Failed to download file, status code: {response.status}")

    except Exception as e:
        logging.error(f"Error downloading file from Drive: {e}")
        raise HTTPException(status_code=500, detail="Failed to download file from Drive")

async def request_gpt4o_completion(image_id: str, user_credit: int):
    try:
        # Step 1: Download the image file from Google Drive
        file_content, mime_type = await download_file_from_drive(image_id)

        # Step 2: Get text data from the image using Google Cloud Vision
        client = vision.ImageAnnotatorClient()
        image = vision.Image(content=file_content)
        response = client.text_detection(image=image)
        texts = response.text_annotations

        if response.error.message:
            logging.error(f"Error in Google Cloud Vision API: {response.error.message}")
            raise Exception(
                "{}\nFor more info on error messages, check: "
                "https://cloud.google.com/apis/design/errors".format(response.error.message)
            )

        # Extract all detected text
        extracted_text = texts[0].description if texts else ""

        # Step 3: Prepare the prompt for Gemini
        prompt = f'''
        #Direction
        You are an employee in the finance department of a company. For each task done well, you will be rewarded a $1,000 bonus.
        The boss has come to your office and given you the task of transferring the content of receipts and business cards onto a Google spreadsheet.
        He hands you two things.
        One is a paper containing a guideline and an example.
        Two is the OCR text.

        #Guideline
        -strictly adhere to the given content of the OCR text.
        -it could contain receipt(s) or business card(s), but not both.
        -put any other information that cannot be classified into "๊ธฐํƒ€".
        -format the extracted information as a JSON object with two main keys: 'receipts' and 'busi_cards'.
        -ensure the total number of receipts and business cards combined does not exceed 500.


        #Example
        ##Receipts
        Input
        ์€์„ฑ๋งˆ์ผ“
        ์‚ฌ์—…์ž๋ฒˆํ˜ธ: 727-27-01455
        ๋Œ€ํ‘œ์ž:ํ—ˆ์œ ์„ฑ
        ์ฃผ
        ์†Œ: ๊ฒฝ๊ธฐ ์‹œํฅ์‹œ ๋ฐฐ๊ณง3๋กœ 96
        ์ „ํ™” ๋ฒˆํ˜ธ: 031-431-1544
        ํŒ๋งค์ผ: 24-02-23 20:10, ๊ธˆ์š”์ผ ๊ณ„์‚ฐ๋Œ€:001
        NO. ์ƒํ’ˆ๋ช…
        ๋‹จ๊ฐ€ ์ˆ˜๋Ÿ‰ ๊ธˆ์•ก
        001 ๋Šํƒ€๋ฆฌ๋ฒ„์„ฏ
        200092
        1,000
        1. 1,000 #
        002 ์–‘๋ฐฐ์ถ”
        200071
        (#)๋ฉด์„ธ๋ฌผํ’ˆ:
        ํ•ฉ
        ๊ณ„:
        ์‹ ์šฉ์นด๋“œ์ง€๋ถˆ:
        โ˜…์นด๋“œ์‹ ์šฉ์Šน์ธ
        ็ซ
        1,800 1 1.800 #
        2,800
        2,800
        2,800
        ๊ฐ€๋งน์ ๋ช… ์€์„ฑ๋งˆ์ผ“
        ์นด๋“œ๋ฒˆํ˜ธ
        4673-09**-****-515*
        ์นด๋“œ๋ช… : KB๊ตญ๋ฏผ์ฒดํฌ
        ์ „ํ‘œ๋งค์ž…: KB๊ตญ๋ฏผ์นด๋“œ ๋ฌด์„œ๋ช…: 00111521584
        ์Šน์ธ๊ธˆ์•ก: 2,800
        (์ผ์‹œ๋ถˆ)
        ์Šน์ธ๋ฒˆํ˜ธ: 30014507, ์ „ํ‘œNo:201036
        ์ œ์ถœ VAN: JTNET-๋ฆฌ๋”๊ธฐ
        ์ •์ƒ์Šน์ธ
        ๊ฐ์‚ฌํ•ฉ๋‹ˆ๋‹ค.
        ๊ฑฐ๋ž˜NO:0223007835 ๊ณ„์‚ฐ์›: ๊ด€๋ฆฌ์ž(001)
        2502230078357

        Output
        ๋ฐœํ–‰์ผ: 2024/02/23 20:10
        ์ƒํ˜ธ: ์€์„ฑ๋งˆ์ผ“
        ์‚ฌ์—…์ž๋ฒˆํ˜ธ: 727-27-01455
        ํ•ฉ๊ณ„๊ธˆ์•ก: 2,800
        ๋‚ด์—ญ: 
            ๋Šํƒ€๋ฆฌ๋ฒ„์„ฏ 1,000์› 1๊ฐœ
            ์–‘๋ฐฐ์ถ” 1,800์› 1๊ฐœ
        ์นด๋“œ๋ฒˆํ˜ธ: 4673-09**-****-515*
        ์นด๋“œ์ข…๋ฅ˜: KB๊ตญ๋ฏผ์ฒดํฌ
        ์Šน์ธ๋ฒˆํ˜ธ: 30014507
        ๊ธฐํƒ€: ๋Œ€ํ‘œ์ž: ํ—ˆ์œ ์„ฑ, ์ฃผ์†Œ: ๊ฒฝ๊ธฐ ์‹œํฅ์‹œ ๋ฐฐ๊ณง3๋กœ 96, ์ „ํ™” ๋ฒˆํ˜ธ: 031-431-1544, ๋ฉด์„ธ๋ฌผํ’ˆ: 2,800, ํ•ฉ๊ณ„: 2,800, ์‹ ์šฉ์นด๋“œ์ง€๋ถˆ:2,800, ์ „ํ‘œNo:201036, ์ œ์ถœ VAN: JTNET-๋ฆฌ๋”๊ธฐ, ๊ฑฐ๋ž˜NO:0223007835 ๊ณ„์‚ฐ์›: ๊ด€๋ฆฌ์ž(001)

        ##Business Cards
        Input
        ๋ฐ•์žฅ์›
        ์•„์ดํ”„๋ฆฌ๋งˆ
        ํŒ€์žฅ/์ •์ฑ…์‚ฌ์—…ํŒ€
        R&D์‚ฌ์—…์‹ค
        Over the MIRACLE
        ๊ฒฝ๊ธฐ๋„ ํ™”์„ฑ์‹œ ์ •๋‚จ๋ฉด ๋งŒ๋…„๋กœ 98๋ฒˆ๊ธธ 55 10์ธต (์šฐ) 18523
        Mobile 010-9582-0925
        Tel (031) 000-0000
        Fax (031) 000-0000
        Email iprima@iprima.com
        www.iprima.com

        Output
        ์ด๋ฆ„: ๋ฐ•์žฅ์›
        ํšŒ์‚ฌ๋ช…: ์•„์ดํ”„๋ฆฌ๋งˆ
        ์†Œ์† ๋ฐ ์ง์œ„: ํŒ€์žฅ/์ •์ฑ…์‚ฌ์—…ํŒ€ R&D์‚ฌ์—…์‹ค
        ์ฃผ์†Œ: ๊ฒฝ๊ธฐ๋„ ํ™”์„ฑ์‹œ ์ •๋‚จ๋ฉด ๋งŒ๋…„๋กœ 98๋ฒˆ๊ธธ 55 (์šฐํŽธ๋ฒˆํ˜ธ) 18523
        ์ „ํ™”: (031) 000-0000
        ํœด๋Œ€ํฐ: (031) 000-0000
        ํŒฉ์Šค: 010-0000-0000
        e-mail: iprima@iprima.com
        ํ™ˆํŽ˜์ด์ง€: www.iprima.com
        ๊ธฐํƒ€: Over the MIRACLE

        Image content: {extracted_text}
        '''
        logging.info("Prompt prepared for Gemini model: " + prompt)

        # Generate content using the Gemini model
        response = model.generate_content(
            contents=[
                prompt
            ]
        )

        # Check if the response has text content
        if response.text:
            logging.info("Parsing Gemini model response")
            json_response = json.loads(response.text)
            logging.info(f"JSON response parsed: {response.text}")
            return json.dumps(json_response)
        else:
            logging.error("Gemini model did not return a text response")
            raise Exception("Gemini model did not return a text response")

    except Exception as e:
        logging.error(f"Error in request_gpt4o_completion: {str(e)}")
        raise HTTPException(status_code=500, detail=f"Failure: {str(e)}")

    
async def move_file_to_folder(file_id, current_parents, new_parents):
    try:

        # Ensure current_parents is a list of parent IDs
        if not isinstance(current_parents, list):
            current_parents = [current_parents]
        if not isinstance(new_parents, list):
            new_parents = [new_parents]

        # Retrieve the existing parents to remove
        file = drive_service.files().get(fileId=file_id, fields='parents').execute()
        existing_parents = file.get('parents')
        logging.info(f"Existing parents of the file {file_id}: {existing_parents}")

        # Remove only the current parents
        parents_to_remove = [parent for parent in current_parents if parent in existing_parents]
        parents_to_add = [parent for parent in new_parents if parent not in existing_parents]

        previous_parents = ",".join(parents_to_remove)
        logging.info(f"Parents to remove: {parents_to_remove}")
        logging.info(f"Parents to add: {parents_to_add}")

        # Move the file to the new folders and remove it from its current parent folders
        if parents_to_add:
            file = drive_service.files().update(
                fileId=file_id,
                addParents=",".join(parents_to_add),
                removeParents=previous_parents,
                fields='id, parents'
            ).execute()
            logging.info(f"File {file_id} moved to new folders {new_parents}.")
        else:
            logging.info(f"File {file_id} already in the target folders {new_parents}.")
    except Exception as e:
        logging.error(f"Failed to move file {file_id} to new folders: {e}")

async def update_user_sheet(spreadsheet_id:str, sheet_name:str, data: List[List[str]], is_reset: Optional[bool] = False):
    print(f"Spreadsheet ID: {spreadsheet_id}, Sheet Name: {sheet_name}, Data: {data}")
    
    # Get all the sheets in the spreadsheet
    sheet_metadata = sheet_service.spreadsheets().get(spreadsheetId=spreadsheet_id).execute()
    sheets = sheet_metadata.get('sheets', '')
    
    # Find the sheet by name
    sheet = next((sheet for sheet in sheets if sheet['properties']['title'] == sheet_name), None)
    
    if not sheet:
        raise ValueError(f"Sheet '{sheet_name}' not found in the spreadsheet.")
    
    sheet_id = sheet['properties']['sheetId']
    
    # Rest of the function remains the same
    no_of_rows = len(data)
    no_of_cols = len(data[0])

    if is_reset:
        new_sheet_name = f"{sheet_name} {datetime.now(kst).strftime('%Y-%m-%d %H:%M')}"
        duplicate_requests = [{
            "duplicateSheet": {
                "sourceSheetId": sheet_id,
                "newSheetName": new_sheet_name
            }
        }]
        duplicate_response = sheet_service.spreadsheets().batchUpdate(
            spreadsheetId=spreadsheet_id,
            body={
                'requests': duplicate_requests
            }
        ).execute()
        sheet_id = duplicate_response['replies'][0]['duplicateSheet']['properties']['sheetId']
        sheet_name = new_sheet_name

    # Insert new rows
    requests = [
        {
        "updateSheetProperties": {
            "properties": {
                "sheetId": sheet_id,
                "hidden": False
            },
            "fields": "hidden"
        }
    },
        {
        "insertDimension": {
            "range": {
                "sheetId": sheet_id,
                "dimension": "ROWS",
                "startIndex": 1,
                "endIndex": no_of_rows + 1
            },
            "inheritFromBefore": False,
        }
    }]
    
    response = sheet_service.spreadsheets().batchUpdate(
        spreadsheetId=spreadsheet_id,
        body={
            'requests': requests
        }
    ).execute()
    
    update_body = {
        'values': data
    }
    start_column = 'A'
    end_column = chr(ord(start_column) + no_of_cols - 1)
    range_to_update = f"'{sheet_name}'!{start_column}2:{end_column}{no_of_rows + 1}"  # quote the sheet name so titles containing spaces are valid A1 notation
    
    result = sheet_service.spreadsheets().values().update(
        spreadsheetId=spreadsheet_id,
        range=range_to_update,
        valueInputOption='USER_ENTERED',
        body=update_body
    ).execute()

    # Format the cells
    for row_index, row in enumerate(data):
        for col_index, cell_value in enumerate(row):
            if cell_value.startswith('=HYPERLINK'):
                color = {"red": 0, "green": 0, "blue": 1}  # Blue for hyperlinks
            else:
                color = {"red": 0, "green": 0, "blue": 0}  # Black for other text
            
            requests = [
                {
                    "repeatCell": {
                        "range": {
                            "sheetId": sheet_id,
                            "startRowIndex": row_index + 1,
                            "endRowIndex": row_index + 2,
                            "startColumnIndex": col_index,
                            "endColumnIndex": col_index + 1
                        },
                        "cell": {
                            "userEnteredFormat": {
                                "textFormat": {
                                    "foregroundColor": color
                                }
                            }
                        },
                        "fields": "userEnteredFormat.textFormat.foregroundColor"
                    }
                }
            ]
            
            response = sheet_service.spreadsheets().batchUpdate(
                spreadsheetId=spreadsheet_id,
                body={
                    'requests': requests
                }
            ).execute()

async def copy_sms_sheet(phone_no: str, spreadsheet_id: str):
    requests = [
        {
            "duplicateSheet": {
                "sourceSheetId": sms_ss,
                "newSheetName": 'SMS ' + phone_no
            }
        }
    ]
    
    response = sheet_service.spreadsheets().batchUpdate(
        spreadsheetId=spreadsheet_id,
        body={
            'requests': requests
        }
    ).execute()
    print(response)
    new_sheet_id = response['replies'][0]['duplicateSheet']['properties']['sheetId']

    unhide_request = [{
        "updateSheetProperties": {
            "properties": {
                "sheetId": new_sheet_id,
                "hidden": False
            },
            "fields": "hidden"
        }
    }]

    sheet_service.spreadsheets().batchUpdate(
        spreadsheetId=spreadsheet_id,
        body={
            'requests': unhide_request
        }
    ).execute()

    return new_sheet_id
    
async def update_fb_transaction(transaction_ref: firestore.DocumentReference, no_sms: Optional[int] = 0, no_contacts: Optional[int] = 0, no_receipts: Optional[int] = 0, no_business_cards: Optional[int] = 0):
    try:
        # Update the transaction
        transaction_ref.update({
            "no_sms": firestore.Increment(no_sms),
            "no_contacts": firestore.Increment(no_contacts),
            "no_receipts": firestore.Increment(no_receipts),
            "no_business_cards": firestore.Increment(no_business_cards)
        })

        return {"status": "Success"}
    except Exception as e:
        logging.error(f"Error updating transaction: {e}")
        return {"status": f"Error: {str(e)}"}

    
@app.post("/check-userdoc")
async def check_user_doc(user_id: str = Form(...)):
    user_ref = users_collection_ref.document(user_id)
    user = user_ref.get()
    if user.exists:
        return {"dbUserExists": True}
    transaction_ref = transactions_collection_ref.document(user_id)
    transaction = transaction_ref.get()
    if not transaction.exists:
        transaction_ref.set({
            "no_sms": 0,
            "no_contacts": 0,
            "no_receipts": 0,
            "no_business_cards": 0,
            "purchased_credit": {
                "image_detection": 100,
                "sync_data": 500,
            },
            "uid": user_id,
        })
    return {"dbUserExists": False}

template_sheet_id = '1i5mrmlTs5sPWtx2mBtc_f-zm1D3x21r1T_77guki8_8'

@app.post("/edit-spreadsheet")
async def edit_spreadsheet(target_sheet_id: str = Form(...)):
    try:
        # Retrieve the target spreadsheet's metadata
        target_spreadsheet = sheet_service.spreadsheets().get(spreadsheetId=target_sheet_id).execute()

        # Collect all sheet IDs in the target spreadsheet
        target_sheet_ids = [sheet['properties']['sheetId'] for sheet in target_spreadsheet.get('sheets', [])]

        # Prepare batch update request to delete existing sheets
        requests = []
        for sheet_id in target_sheet_ids:
            requests.append({
                "deleteSheet": {
                    "sheetId": sheet_id
                }
            })

        # Retrieve the template spreadsheet's metadata
        template_spreadsheet = sheet_service.spreadsheets().get(spreadsheetId=template_sheet_id).execute()

        # Iterate through each sheet in the template spreadsheet
        for sheet in template_spreadsheet.get('sheets', []):
            sheet_id = sheet['properties']['sheetId']

            # Use the copyTo method to copy the sheet to the target spreadsheet
            copy_request = sheet_service.spreadsheets().sheets().copyTo(
                spreadsheetId=template_sheet_id,
                sheetId=sheet_id,
                body={"destinationSpreadsheetId": target_sheet_id}
            )
            copy_response = copy_request.execute()

            # Get the copied sheet ID
            copied_sheet_id = copy_response['sheetId']

            # Rename the copied sheet to remove "Copy of"
            copied_sheet_title = sheet['properties']['title'].replace("Copy of ", "")
            rename_request = {
                "updateSheetProperties": {
                    "properties": {
                        "sheetId": copied_sheet_id,
                        "title": copied_sheet_title
                    },
                    "fields": "title"
                }
            }

            # Execute rename request
            sheet_service.spreadsheets().batchUpdate(
                spreadsheetId=target_sheet_id,
                body={"requests": [rename_request]}
            ).execute()

            logging.info(f"Sheet {sheet['properties']['title']} copied and renamed to {copied_sheet_title} in {target_sheet_id}.")

        if requests:
            # Execute batch update to delete sheets
            batch_update_request = {"requests": requests}
            sheet_service.spreadsheets().batchUpdate(
                spreadsheetId=target_sheet_id,
                body=batch_update_request
            ).execute()
            logging.info(f"Deleted existing sheets in target spreadsheet {target_sheet_id}.")
            
        logging.info(f"All sheets from template {template_sheet_id} copied to {target_sheet_id}.")
        return {"message": "Spreadsheet copied successfully"}

    except Exception as e:
        logging.error(f"Failed to copy sheets: {e}")
        return {"error": str(e)}
    
async def create_shortcut(file_id, parent_folder_id, name):
    try:
        file_metadata = {
            'name': name,
            'mimeType': 'application/vnd.google-apps.shortcut',
            'parents': [parent_folder_id],
            'shortcutDetails': {
                'targetId': file_id
            }
        }
        shortcut = drive_service.files().create(body=file_metadata, fields='id').execute()
        return shortcut.get('id')
    except Exception as e:
        logging.error(f"Failed to create shortcut: {e}")
        return None

async def update_user_sheets_and_folders(user_data, transaction_ref, transaction_data, image_id):
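    # user_data['gDrive_metadata'] is expected to hold Drive folder IDs and a spreadsheet ID under the
    # keys 'yungsoogi', 'uploaded', 'receipts', 'business_cards', 'error', and 'spreadsheet'
    # (inferred from the lookups below).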
    try:
        parent_folders = [user_data['gDrive_metadata']['yungsoogi'], user_data['gDrive_metadata']['uploaded']]
        dataJson = await request_gpt4o_completion(image_id, transaction_data['purchased_credit']['image_detection'])
        if dataJson is None:
            return {"error": "An error occurred while processing the image"}
        
        data = json.loads(dataJson)
        # Check if 'receipts' key exists and the length
        found_receipt_no = len(data['receipts']) if 'receipts' in data else 0
        found_business_cards_no = len(data['busi_cards']) if 'busi_cards' in data else 0
        new_folder = None
        secondary_folder = None
        # timestamp = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
        timestamp = datetime.now(kst).strftime("%Y-%m-%d %H:%M:%S")
        # https://drive.google.com/file/d/1cFIA09PBqFjM8YvIz1D60p7HMGk1uBvo/view?usp=sharing
        download_image_url = f"https://drive.google.com/uc?id={image_id}&export=download"
        image_url = f"https://drive.google.com/file/d/{image_id}/view?usp=sharing"
        # To display the image itself in a cell use =image({image_url}); to create a clickable link use =HYPERLINK({image_url}, '๋ฐ”๋กœ๊ฐ€๊ธฐ').

        # Determine the primary folder to move the image to and secondary folder for shortcut
        if found_receipt_no > 0:
            new_folder = user_data['gDrive_metadata']['receipts']
            receipts_data = convert_dicts_to_list(
                data['receipts'],
                receipt_sheet_headers,
                add_link=True, 
                image_id=image_id, 
                link_text="๋ฐ”๋กœ๊ฐ€๊ธฐ",
                link_position=1  # Link third
            )
            print(receipts_data)
            await update_user_sheet(user_data['gDrive_metadata']['spreadsheet'], "์˜์ˆ˜์ฆ", receipts_data)

        if found_business_cards_no > 0:
            if new_folder is None:
                new_folder = user_data['gDrive_metadata']['business_cards']
            else:
                secondary_folder = user_data['gDrive_metadata']['business_cards']
            business_cards_data = convert_dicts_to_list(
                data['busi_cards'],
                business_card_sheet_headers,
                add_link=True, 
                image_id=image_id, 
                link_text="๋ช…ํ•จ๋ณด๊ธฐ",
                link_position=-1  # Link at the end
            )
            await update_user_sheet(user_data['gDrive_metadata']['spreadsheet'], "๋ช…ํ•จ", business_cards_data)

        print(f"{found_receipt_no}, {found_business_cards_no}")
    
        status = await update_fb_transaction(
            transaction_ref,
            no_receipts=found_receipt_no,
            no_business_cards=found_business_cards_no
        )
    
        if status['status'] == "Success":
            if new_folder:
                await move_file_to_folder(image_id, parent_folders, [new_folder])
                if secondary_folder:
                    await create_shortcut(image_id, secondary_folder, f"Shortcut to {image_id}")
            return {"success": True}
        else:
            logging.error(f"Transaction update failed with status: {status['status']}")
            return {"error": "An error has occurred while updating the transaction"}
    except Exception as e:
        logging.error(f"An error occurred: {e}")
        # Move the image file to the error folder
        await move_file_to_folder(image_id, parent_folders, [user_data['gDrive_metadata']['error']])
        return {"error": str(e)}

@app.post("/process-image")
async def process_photo(background_tasks: BackgroundTasks, image_id: str = Form(...), user_id: str = Form(...)):
    transaction_ref = transactions_collection_ref.document(user_id)
    transaction = transaction_ref.get()
    
    if not transaction.exists:
        transaction_ref.set({
            "no_sms": 0,
            "no_contacts": 0,
            "no_receipts": 0,
            "no_business_cards": 0,
            "purchased_credit": {
                "image_detection": 100,
                "sync_data": 500,
            }
        })
        transaction = transaction_ref.get()
    
    transaction_data = transaction.to_dict()
    
    if transaction_data['no_receipts'] + transaction_data['no_business_cards'] >= transaction_data['purchased_credit']['image_detection']:
        return {"error": "You have reached the limit of the number of items you can process. Please upgrade your plan."}
    
    user_ref = users_collection_ref.document(user_id)
    user = user_ref.get()
    user_data = user.to_dict()
    
    background_tasks.add_task(update_user_sheets_and_folders, user_data, transaction_ref, transaction_data, image_id)
    return {"success": True}
    
@app.post("/sync-data")
async def sync_data(data_json: str = Form(...), user_id: str = Form(...)):
    # print(data_json)
    data: Dict[str, Any] = json.loads(data_json)
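    # Expected payload shape (inferred from the handling below; illustrative, not a formal schema):
    #   contacts: {"isContacts": true,  "data": [[<row values matching contact_sheet_headers>], ...]}
    #   SMS:      {"isContacts": false, "data": [{"address": "<phone number>",
    #                                             "sms": [[<ISO timestamp>, ...], ...]}, ...]}  # newest message first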
    transaction_ref = transactions_collection_ref.document(user_id)
    transaction = transaction_ref.get()
    
    if not transaction.exists:
        transaction_ref.set({
            "no_sms": 0,
            "no_contacts": 0,
            "no_receipts": 0,
            "no_business_cards": 0,
            "purchased_credit": {
                "image_detection": 100,
                "sync_data": 500,
            }
        })
        transaction = transaction_ref.get()
        
    transaction_data = transaction.to_dict()

    if transaction_data['no_contacts'] + transaction_data['no_sms'] >= transaction_data['purchased_credit']['sync_data']:
        return {"error": "You have reached the limit of the number of items you can process. Please upgrade your plan."}

    user_ref = users_collection_ref.document(user_id)
    user = user_ref.get()
    user_data = user.to_dict()

    try:
        if data['isContacts']:
            await update_user_sheet(user_data['gDrive_metadata']['spreadsheet'], "์—ฐ๋ฝ์ฒ˜", data['data'], True)
            transaction_data['no_contacts'] += len(data['data'])
            transaction_ref.update({'no_contacts': transaction_data['no_contacts']})
            
        else:
            if 'sms_ids' not in user_data:
                print('No sms_ids')
                user_data['sms_ids'] = {}

            existing_phones = set(user_data.get('sms_ids', {}).keys())
            new_phones = [phone['address'] for phone in data['data'] if phone['address'] not in existing_phones]
            
            print(f"Existing phones: {existing_phones}")
            
            for phone_address in new_phones:
                new_sheet_id = await copy_sms_sheet(phone_address, user_data['gDrive_metadata']['spreadsheet'])
                print(f"New sheet id: {new_sheet_id}")
                user_data['sms_ids'][phone_address] = {'sheet_id': new_sheet_id, 'last_updated': "1970-01-01T00:00:00.000"}
                print(user_data['sms_ids'])
                
            updated_sms_count = 0
            for phone in data['data']:
                last_updated = user_data['sms_ids'][phone['address']]['last_updated']
                print(f"Last updated: {last_updated}")
                
                # Convert last_updated to datetime for comparison
                last_updated_dt = datetime.fromisoformat(last_updated)
                update_message = [sms for sms in phone['sms'] if datetime.fromisoformat(sms[0]) > last_updated_dt]
                print(f"Update message: {update_message}")
                
                if not update_message:
                    continue
                
                sheet_id = user_data['sms_ids'][phone['address']]['sheet_id']
                print(f"Sheet ID: {sheet_id}")

                await update_user_sheet(user_data['gDrive_metadata']['spreadsheet'], 'SMS ' + phone['address'], update_message, False)
                
                user_data['sms_ids'][phone['address']]['last_updated'] = phone['sms'][0][0]
                updated_sms_count += len(update_message)

                
            user_ref.update({'sms_ids': user_data['sms_ids']})
            transaction_data['no_sms'] += updated_sms_count
            transaction_ref.update({'no_sms': transaction_data['no_sms']})

        
        return {"success": True}
    except Exception as e:
        logging.error(f"An error occurred: {e}")
        return {"error": str(e)}