acecalisto3 committed on
Commit
cdfaefc
1 Parent(s): fa8b90b

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +33 -10
app.py CHANGED
@@ -1,24 +1,47 @@
1
  from flask import Flask, request, jsonify
2
  from huggingface_hub import HfApi
 
 
3
 
4
  app = Flask(__name__)
5
  api = HfApi()
6
 
 
 
 
 
7
  @app.route('/search_datasets', methods=['GET'])
8
  def search_datasets():
9
- query = request.args.get('query')
10
- datasets = api.list_datasets(search=query, full=True)
11
- return jsonify(datasets)
 
 
 
 
 
 
 
 
 
12
 
13
  @app.route('/run_inference', methods=['POST'])
14
  def run_inference():
15
- model_id = request.json['model_id']
16
- inputs = request.json['inputs']
17
- # Assuming the model is compatible with the pipeline API
18
- from transformers import pipeline
19
- model_pipeline = pipeline(task="text-generation", model=model_id)
20
- results = model_pipeline(inputs)
21
- return jsonify(results)
 
 
 
 
 
 
 
 
22
 
23
  if __name__ == '__main__':
24
  app.run(debug=False)
 
1
  from flask import Flask, request, jsonify
2
  from huggingface_hub import HfApi
3
+ from transformers import pipeline
4
+ import logging
5
 
6
  app = Flask(__name__)
7
  api = HfApi()
8
 
9
+ # Configure logging
10
+ logging.basicConfig(level=logging.INFO)
11
+ logger = logging.getLogger()
12
+
13
  @app.route('/search_datasets', methods=['GET'])
14
  def search_datasets():
15
+ try:
16
+ query = request.args.get('query')
17
+ if not query:
18
+ logger.error("No query provided for dataset search.")
19
+ return jsonify({"error": "No query parameter provided"}), 400
20
+
21
+ logger.info(f"Searching datasets with query: {query}")
22
+ datasets = api.list_datasets(search=query, full=True)
23
+ return jsonify(datasets)
24
+ except Exception as e:
25
+ logger.error(f"Failed to search datasets: {str(e)}")
26
+ return jsonify({"error": str(e)}), 500
27
 
28
# Cache of loaded pipelines keyed by model_id, so repeated requests for the
# same model do not reload weights on every call.
_pipelines = {}


def _get_pipeline(model_id):
    """Return a (cached) text-generation pipeline for ``model_id``."""
    if model_id not in _pipelines:
        _pipelines[model_id] = pipeline(task="text-generation", model=model_id)
    return _pipelines[model_id]


@app.route('/run_inference', methods=['POST'])
def run_inference():
    """Run text-generation inference with a Hub model.

    JSON body:
        model_id: required model identifier.
        inputs:   required prompt text.

    Returns:
        200 with the pipeline results as JSON,
        400 if the body is missing/not JSON or lacks required fields,
        500 if model loading or generation fails.
    """
    try:
        # get_json(silent=True) returns None instead of raising when the
        # body is absent or not valid JSON; request.json would error here.
        payload = request.get_json(silent=True) or {}
        model_id = payload.get('model_id')
        inputs = payload.get('inputs')

        if not model_id or not inputs:
            logger.error("Model ID or inputs missing in the request.")
            return jsonify({"error": "Model ID or inputs missing in the request"}), 400

        logger.info("Running inference using model: %s", model_id)
        results = _get_pipeline(model_id)(inputs)
        return jsonify(results)
    except Exception as e:
        # Boundary handler: surface load/generation failures to the client.
        logger.error("Failed to run inference: %s", e)
        return jsonify({"error": str(e)}), 500
 
46
# Entry point: serve with Flask's built-in development server. Debug mode is
# off so interactive tracebacks are not exposed.
if __name__ == '__main__':
    app.run(debug=False)