phyloforfun committed on
Commit
f324f16
1 Parent(s): 205fb4e
vouchervision/OCR_google_cloud_vision.py CHANGED
@@ -810,7 +810,7 @@ class SafetyCheck():
810
  return credentials
811
 
812
  def check_for_inappropriate_content(self, file_stream):
813
- LEVEL = 3
814
  content = file_stream.read()
815
  image = vision.Image(content=content)
816
  response = self.client.safe_search_detection(image=image)
@@ -826,11 +826,11 @@ class SafetyCheck():
826
  )
827
  print("Safe search:")
828
 
829
- print(f"adult: {likelihood_name[safe.adult]}")
830
- print(f"medical: {likelihood_name[safe.medical]}")
831
- print(f"spoofed: {likelihood_name[safe.spoof]}")
832
- print(f"violence: {likelihood_name[safe.violence]}")
833
- print(f"racy: {likelihood_name[safe.racy]}")
834
 
835
  # Check the levels of adult, violence, racy, etc. content.
836
  if (safe.adult > LEVEL or
 
810
  return credentials
811
 
812
  def check_for_inappropriate_content(self, file_stream):
813
+ LEVEL = 2
814
  content = file_stream.read()
815
  image = vision.Image(content=content)
816
  response = self.client.safe_search_detection(image=image)
 
826
  )
827
  print("Safe search:")
828
 
829
+ print(f" adult*: {likelihood_name[safe.adult]}")
830
+ print(f" medical*: {likelihood_name[safe.medical]}")
831
+ print(f" spoofed: {likelihood_name[safe.spoof]}")
832
+ print(f" violence*: {likelihood_name[safe.violence]}")
833
+ print(f" racy: {likelihood_name[safe.racy]}")
834
 
835
  # Check the levels of adult, violence, racy, etc. content.
836
  if (safe.adult > LEVEL or