something01 committed on
Commit
1830d05
1 Parent(s): c8701df

Upload 35 files

.gitattributes CHANGED
@@ -33,3 +33,8 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
  *.zip filter=lfs diff=lfs merge=lfs -text
  *.zst filter=lfs diff=lfs merge=lfs -text
  *tfevents* filter=lfs diff=lfs merge=lfs -text
+ boundbuilding.gif filter=lfs diff=lfs merge=lfs -text
+ da.gif filter=lfs diff=lfs merge=lfs -text
+ img-gen.gif filter=lfs diff=lfs merge=lfs -text
+ rec.gif filter=lfs diff=lfs merge=lfs -text
+ Resume-Screener.gif filter=lfs diff=lfs merge=lfs -text
.gitignore ADDED
@@ -0,0 +1,4 @@
+ **/.env
+ **/bb-env/
+ **/.DS_Store
+ **/.DS_Store/
App.css ADDED
@@ -0,0 +1,100 @@
+ /* File: App.css */
+
+ .container {
+   background-color: #f0f0f0;
+   border-radius: 15px;
+   box-shadow: 0 2px 4px rgba(0, 0, 0, 0.1);
+ }
+
+ h1 {
+   color: #2c3e50;
+   text-align: center;
+   margin-bottom: 30px;
+ }
+
+ .form-label {
+   font-size: 20px;
+   color: #8e44ad;
+ }
+
+ .text-muted {
+   color: #3498db !important;
+ }
+
+ .centerButton {
+   display: block;
+   margin: 0 auto 20px auto;
+   background-color: #e74c3c;
+   border-color: #e74c3c;
+ }
+
+ .centerButton:hover {
+   background-color: #c0392b;
+   border-color: #c0392b;
+ }
+
+ #backdrop {
+   display: flex;
+   justify-content: center;
+   align-items: center;
+   position: fixed;
+   top: 0;
+   left: 0;
+   width: 100%;
+   height: 100%;
+   background-color: rgba(0, 0, 0, 0.5);
+   z-index: 9999;
+ }
+
+ .img-fluid {
+   display: block;
+   margin: 20px auto;
+   max-width: 100%;
+   height: auto;
+   border: 2px solid #2c3e50;
+   border-radius: 10px;
+ }
+
+ .fancy-radio .custom-control-label::before {
+   border: 2px solid #007BFF;
+   box-shadow: 0 2px 4px rgba(0,0,0,0.1);
+ }
+
+ .fancy-radio .custom-control-input:checked ~ .custom-control-label::before {
+   background-color: #007BFF;
+   border-color: #007BFF;
+ }
+
+ /* Gradient color for the style title */
+ .style-title {
+   background: linear-gradient(45deg, #f06, #9f6);
+   -webkit-background-clip: text;
+   color: transparent;
+   font-size: 1.2rem;
+ }
+
+ /* Enhance radio button appearance */
+ .fancy-radio {
+   transition: transform 0.3s ease;
+ }
+
+ .fancy-radio:hover {
+   transform: scale(1.1);
+ }
+
+ /* Highlight the text input with a subtle shadow */
+ #formBasicEmail .form-control {
+   box-shadow: 0 4px 12px rgba(0, 0, 0, 0.1);
+ }
+
+ /* Button hover enhancement */
+ .btn-primary:hover {
+   background-color: #0070f3;
+   border-color: #0056b3;
+ }
+
+
+ /* Reduce space between radio buttons */
+ .fancy-radio + .fancy-radio {
+   margin-left: 10px; /* Adjust this value as per your preference */
+ }
App.jsx ADDED
@@ -0,0 +1,140 @@
+ import axios from 'axios';
+ import React, { Component } from 'react';
+ import './App.css';
+
+ import Button from 'react-bootstrap/Button';
+ import Form from 'react-bootstrap/Form';
+ import Spinner from 'react-bootstrap/Spinner';
+ import Container from 'react-bootstrap/Container';
+ import Image from 'react-bootstrap/Image';
+
+ class App extends Component {
+   state = {
+     isLoadingVisible: false,
+     val: '',
+     imgSrc: '',
+     style_preset: 'photographic' // Default to photographic
+   };
+
+   showLoading = () => {
+     this.setState({ isLoadingVisible: true });
+   };
+
+   hideLoading = () => {
+     this.setState({ isLoadingVisible: false });
+   };
+
+   handleChange = (e) => {
+     this.setState({
+       style_preset: e.target.value
+     });
+   };
+
+   handleSubmit = (e) => {
+     e.preventDefault();
+     this.showLoading();
+
+     // In development, 'test/genai' is rewritten to the API Gateway stage by setupProxy.js
+     const api = process.env.NODE_ENV === 'development' ? 'test/genai' : '<API-GW-ENDPOINT>'; // Replace with your API GW Endpoint
+     const data = {
+       data: e.target.searchQuery.value,
+       style_preset: this.state.style_preset
+     };
+
+     axios({
+       method: 'POST',
+       data: JSON.stringify(data),
+       headers: { 'Content-Type': 'application/json' },
+       url: api,
+     })
+       .then((response) => {
+         this.setState({ imgSrc: response.data.body });
+         setTimeout(() => {
+           this.hideLoading();
+           this.setState({ val: '' });
+         }, 500);
+       })
+       .catch((error) => {
+         console.log(error);
+         this.hideLoading(); // Dismiss the spinner on failure as well
+       });
+   };
+
+   render() {
+     return (
+       <Container className='p-5 container' id='container' name='container'>
+         <h1>Unleashing Machine Learning with Amazon Bedrock</h1>
+         <Form onSubmit={this.handleSubmit}>
+           <Form.Group className='mb-3' controlId='formBasicEmail'>
+             <Form.Label className='form-label'>Your Words, Our Canvas: Enter Text to Create an Image!</Form.Label>
+             <Form.Control
+               type='text'
+               placeholder='Enter text to convert to an image'
+               required
+               autoFocus={true}
+               name='searchQuery'
+               defaultValue={this.state.val}
+             />
+             <Form.Text className='text-muted'>
+               We'll sketch, stretch, and kvetch until your image is a fetching fetch
+             </Form.Text>
+
+             {/* Title for Radio buttons */}
+             <p className="mt-3 font-weight-bold style-title">Select your style:</p>
+
+             {/* Radio buttons horizontally aligned with some styling */}
+             <div className="d-flex justify-content-between">
+               <Form.Check
+                 inline
+                 type='radio'
+                 label='photographic'
+                 value='photographic'
+                 name='styleOptions'
+                 checked={this.state.style_preset === 'photographic'}
+                 onChange={this.handleChange}
+                 className='fancy-radio'
+               />
+               <Form.Check
+                 inline
+                 type='radio'
+                 label='digital-art'
+                 value='digital-art'
+                 name='styleOptions'
+                 checked={this.state.style_preset === 'digital-art'}
+                 onChange={this.handleChange}
+                 className='fancy-radio'
+               />
+               <Form.Check
+                 inline
+                 type='radio'
+                 label='cinematic'
+                 value='cinematic'
+                 name='styleOptions'
+                 checked={this.state.style_preset === 'cinematic'}
+                 onChange={this.handleChange}
+                 className='fancy-radio'
+               />
+             </div>
+
+           </Form.Group>
+
+           <Button variant='primary' type='submit' className='btn btn-primary btn-large centerButton'>
+             Submit
+           </Button>
+
+           <Image id='myImage' className='img-fluid shadow-4' src={this.state.imgSrc} />
+         </Form>
+
+         {this.state.isLoadingVisible && (
+           <div id='backdrop'>
+             <Button variant='primary' disabled>
+               <Spinner as='span' animation='grow' size='sm' role='status' aria-hidden='true' />
+               Loading...
+             </Button>
+           </div>
+         )}
+       </Container>
+     );
+   }
+ }
+
+ export default App;
CODE_OF_CONDUCT.md ADDED
@@ -0,0 +1,4 @@
+ ## Code of Conduct
+ This project has adopted the [Amazon Open Source Code of Conduct](https://aws.github.io/code-of-conduct).
+ For more information see the [Code of Conduct FAQ](https://aws.github.io/code-of-conduct-faq) or contact
+ opensource-codeofconduct@amazon.com with any additional questions or comments.
CONTRIBUTING.md ADDED
@@ -0,0 +1,59 @@
+ # Contributing Guidelines
+
+ Thank you for your interest in contributing to our project. Whether it's a bug report, new feature, correction, or additional
+ documentation, we greatly value feedback and contributions from our community.
+
+ Please read through this document before submitting any issues or pull requests to ensure we have all the necessary
+ information to effectively respond to your bug report or contribution.
+
+
+ ## Reporting Bugs/Feature Requests
+
+ We welcome you to use the GitHub issue tracker to report bugs or suggest features.
+
+ When filing an issue, please check existing open, or recently closed, issues to make sure somebody else hasn't already
+ reported the issue. Please try to include as much information as you can. Details like these are incredibly useful:
+
+ * A reproducible test case or series of steps
+ * The version of our code being used
+ * Any modifications you've made relevant to the bug
+ * Anything unusual about your environment or deployment
+
+
+ ## Contributing via Pull Requests
+ Contributions via pull requests are much appreciated. Before sending us a pull request, please ensure that:
+
+ 1. You are working against the latest source on the *main* branch.
+ 2. You check existing open, and recently merged, pull requests to make sure someone else hasn't addressed the problem already.
+ 3. You open an issue to discuss any significant work - we would hate for your time to be wasted.
+
+ To send us a pull request, please:
+
+ 1. Fork the repository.
+ 2. Modify the source; please focus on the specific change you are contributing. If you also reformat all the code, it will be hard for us to focus on your change.
+ 3. Ensure local tests pass.
+ 4. Commit to your fork using clear commit messages.
+ 5. Send us a pull request, answering any default questions in the pull request interface.
+ 6. Pay attention to any automated CI failures reported in the pull request, and stay involved in the conversation.
+
+ GitHub provides additional documentation on [forking a repository](https://help.github.com/articles/fork-a-repo/) and
+ [creating a pull request](https://help.github.com/articles/creating-a-pull-request/).
+
+
+ ## Finding contributions to work on
+ Looking at the existing issues is a great way to find something to contribute to. As our projects, by default, use the default GitHub issue labels (enhancement/bug/duplicate/help wanted/invalid/question/wontfix), looking at any 'help wanted' issues is a great place to start.
+
+
+ ## Code of Conduct
+ This project has adopted the [Amazon Open Source Code of Conduct](https://aws.github.io/code-of-conduct).
+ For more information see the [Code of Conduct FAQ](https://aws.github.io/code-of-conduct-faq) or contact
+ opensource-codeofconduct@amazon.com with any additional questions or comments.
+
+
+ ## Security issue notifications
+ If you discover a potential security issue in this project we ask that you notify AWS/Amazon Security via our [vulnerability reporting page](http://aws.amazon.com/security/vulnerability-reporting/). Please do **not** create a public GitHub issue.
+
+
+ ## Licensing
+
+ See the [LICENSE](LICENSE) file for our project's licensing. We will ask you to confirm the licensing of your contribution.
LICENSE ADDED
@@ -0,0 +1,17 @@
+ MIT No Attribution
+
+ Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
+
+ Permission is hereby granted, free of charge, to any person obtaining a copy of
+ this software and associated documentation files (the "Software"), to deal in
+ the Software without restriction, including without limitation the rights to
+ use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
+ the Software, and to permit persons to whom the Software is furnished to do so.
+
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
+ FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
+ COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
+ IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
+ CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
README.md CHANGED
@@ -1,12 +1,69 @@
- ---
- title: Llm Rag Vectordb Python
- emoji: 🐢
- colorFrom: yellow
- colorTo: purple
- sdk: gradio
- sdk_version: 4.14.0
- app_file: app.py
- pinned: false
- ---
-
- Check out the configuration reference at https://huggingface.co/docs/hub/spaces-config-reference
+ # Building Bonds: The Power of Ice-Breakers
+
+ ![Application Banner](/llm-rag-vectordb-python/building-bonds/boundbuilding.gif)
+
+ Welcome to **Building Bonds**, a Streamlit application that harnesses the strengths of Amazon Bedrock and LangChain. Make your introductions more memorable! Enter a name, let the application search for that person's LinkedIn profile, and receive a concise summary plus ice-breaking facts about them.
+
+ ## Features
+
+ 1. **Instant LinkedIn Search**: Just provide a name, and the application will try to locate the matching LinkedIn profile on the internet.
+ 2. **Automated Summary**: With the capabilities of Amazon Bedrock and LangChain, receive a detailed overview of the person's career and accomplishments.
+ 3. **Ice-Breaker Facts**: Start your conversation with a bang! Learn unique and engaging facts about the individual.
+
+ ## How It Works
+
+ The magic behind **Building Bonds** (see the sketch after this list):
+
+ - **Amazon Bedrock**: Empowers the system to dive deep into the data and surface meaningful insights.
+ - **LangChain**: Assists with linguistic processing, allowing the app to draw a clear and engaging summary from LinkedIn details.
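+
+ Under the hood (see `app.py` in this commit), an agent first resolves the name to a profile URL, Proxycurl scrapes the profile, and a Bedrock-backed LangChain chain writes the summary. Here is a minimal sketch of just that summarization step, reusing the model and prompt wiring from `app.py`; the sample `information` dict is a stand-in for real scraped data:
+
+ ```python
+ from langchain.prompts import PromptTemplate
+ from langchain.chains import LLMChain
+ from langchain.llms.bedrock import Bedrock
+
+ # Claude v2 on Bedrock; low temperature keeps the summary factual
+ llm = Bedrock(model_id="anthropic.claude-v2",
+               model_kwargs={"temperature": 0.1, "max_tokens_to_sample": 4096})
+
+ prompt = PromptTemplate(
+     input_variables=["information"],
+     template="Given the LinkedIn information {information} about a person, "
+              "write a short summary and two interesting facts about them.",
+ )
+
+ chain = LLMChain(llm=llm, prompt=prompt)
+ # Toy stand-in for the dict returned by scrape_linkedin_profile()
+ print(chain.run(information={"occupation": "Principal Developer Advocate at AWS"}))
+ ```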
+
+ ## Getting Started
+
+ ### **1. Pre-requisites**
+
+ - Clone the repository to your local machine.
+ - Create a `.env` file in the project directory using `env.example` as a reference. Populate the `.env` file with your Proxycurl and SerpAPI key details:
+
+ ```bash
+ PROXYCURL_API_KEY=<YOUR API KEY>
+ SERPAPI_API_KEY=<YOUR API KEY>
+ ```
+
+ ### **2. Setting Up a Virtual Environment**
+
+ Use `virtualenv` to create an isolated Python environment:
+
+ 1. Install `virtualenv`:
+    ```bash
+    pip install virtualenv
+    ```
+
+ 2. Navigate to the directory where you cloned the repository.
+
+ 3. Initialize the virtual environment:
+    ```bash
+    virtualenv bb-env
+    ```
+
+ 4. Activate the environment:
+    ```bash
+    source bb-env/bin/activate
+    ```
+
+ ### **3. Installing Dependencies**
+
+ With your virtual environment active, install the necessary packages:
+
+ ```bash
+ pip install -r requirements.txt
+ ```
+
+ This command installs all dependencies from the `requirements.txt` file into your `bb-env` environment.
+
+ ### **4. Usage**
+
+ Launch the application using Streamlit:
+
+ ```bash
+ streamlit run app.py
+ ```
Resume-Screener.gif ADDED

Git LFS Details

  • SHA256: 60329b69c7b7c7748b343bcd2eb79e129cb7c62548352034f33ecf75fe35f7e4
  • Pointer size: 133 Bytes
  • Size of remote file: 33.6 MB
__init__.py ADDED
File without changes
app.py ADDED
@@ -0,0 +1,76 @@
+ import streamlit as st
+
+ from langchain.prompts import PromptTemplate
+ from langchain.chains import LLMChain
+ from third_parties.linkedin import scrape_linkedin_profile
+ from agents.linkedin_lookup_agent import lookup as linkedin_lookup_agent
+ from langchain.llms.bedrock import Bedrock
+
+
+ def get_llm():
+     bedrock_llm = Bedrock(model_id="anthropic.claude-v2",
+                           model_kwargs={"temperature": 0.1, "max_tokens_to_sample": 4096})
+     return bedrock_llm
+
+
+ def ice_break_with(name: str):
+     linkedin_profile_url = linkedin_lookup_agent(name=name)
+     if not linkedin_profile_url:
+         # lookup() returns None when the agent output cannot be parsed
+         return "Could not find a LinkedIn profile for that name."
+     linkedin_data = scrape_linkedin_profile(linkedin_profile_url=linkedin_profile_url)
+
+     summary_template = """
+     Given the LinkedIn information {information} about a person, I wish to create the following:
+     1. A short summary
+     2. Two interesting facts about them
+     """
+
+     summary_prompt_template = PromptTemplate(
+         input_variables=["information"],
+         template=summary_template,
+     )
+
+     llm = get_llm()
+     chain = LLMChain(llm=llm, prompt=summary_prompt_template)
+
+     result = chain.run(information=linkedin_data)
+     return result
+
+
+ def main():
+     st.title('Building Bonds: The Power of Ice-Breakers 💼✨')
+     st.write('An app that uses Amazon Bedrock and LangChain to create summaries based on a person\'s social media profile. 🚀')
+
+     st.sidebar.header("🔎 Enter the person's details")
+     name = st.sidebar.text_input("Name (e.g., 'Andy Jassy Amazon'):")
+
+     if st.sidebar.button('Get Summary'):
+         with st.spinner('Fetching LinkedIn data and creating summary... 🔄'):
+             result = ice_break_with(name)
+             st.subheader('Summary and a couple of interesting facts 📝')
+             st.write(result)
+             st.success('Summary generated successfully! 👍')
+
+     st.markdown(
+         "<h3 style='text-align: center; font-size: 20px;'> To know more about Amazon Bedrock, visit <a href='https://aws.amazon.com/bedrock/' target='_blank'>here</a> </h3>",
+         unsafe_allow_html=True
+     )
+     # Styling the Streamlit page
+     st.markdown("""
+         <style>
+             body {
+                 color: #4f4f4f;
+                 background-color: #F5F5F5;
+             }
+             .stButton>button {
+                 color: #4f4f4f;
+                 background-color: #FFD700;
+                 border-radius: 30px;
+                 padding: 10px 20px;
+                 font-size: 1.2em;
+             }
+         </style>
+     """, unsafe_allow_html=True)
+
+
+ if __name__ == "__main__":
+     main()
boundbuilding.gif ADDED

Git LFS Details

  • SHA256: dbfa683050610ea0bde7ae1fce46a4b0a63a934c1c37e2ecdddce40e37414a4e
  • Pointer size: 132 Bytes
  • Size of remote file: 3.19 MB
da.gif ADDED

Git LFS Details

  • SHA256: c4ecb7c1cf1bbc75c3512336518fafdc6f12a4aa04660ff64ff27c4d0661faa0
  • Pointer size: 132 Bytes
  • Size of remote file: 7.25 MB
env.example ADDED
@@ -0,0 +1,7 @@
+ HUGGINFACE_HUB_API_TOKEN='YOUR_API_KEY'
+ PGVECTOR_DRIVER='psycopg2'
+ PGVECTOR_USER='<<USERNAME>>'
+ PGVECTOR_PASSWORD='<<PASSWORD>>'
+ PGVECTOR_HOST='<<AURORA-DB-CLUSTER-HOST>>'
+ PGVECTOR_PORT=5432
+ PGVECTOR_DATABASE='<<DBNAME>>'
favicon.ico ADDED
img-gen.gif ADDED

Git LFS Details

  • SHA256: 73eaed38837c58e71ae0a008d5ecf8b18b5537ac4f851bd678e2d6ac45398e1b
  • Pointer size: 132 Bytes
  • Size of remote file: 6.14 MB
index-bk.css ADDED
@@ -0,0 +1,20 @@
+ #backdrop {
+   position: absolute;
+   top: 0;
+   left: 0;
+   width: 100vw;
+   height: 100vh;
+   z-index: 999;
+   background-color: rgba(0, 0, 0, 0.262);
+   text-align: center;
+ }
+ #backdrop button {
+   top: calc(50% - (58px / 2));
+   right: calc(50% - (58px / 2));
+   display: block;
+   position: fixed;
+ }
+ #myImage {
+   max-inline-size: 100%;
+   block-size: auto;
+ }
index.css ADDED
@@ -0,0 +1,20 @@
+ #backdrop {
+   position: absolute;
+   top: 0;
+   left: 0;
+   width: 100vw;
+   height: 100vh;
+   z-index: 999;
+   background-color: rgba(0, 0, 0, 0.262);
+   text-align: center;
+ }
+ #backdrop button {
+   top: calc(50% - (58px / 2));
+   right: calc(50% - (58px / 2));
+   display: block;
+   position: fixed;
+ }
+ #myImage {
+   max-inline-size: 100%;
+   block-size: auto;
+ }
index.html ADDED
@@ -0,0 +1,43 @@
+ <!DOCTYPE html>
+ <html lang="en">
+   <head>
+     <meta charset="utf-8" />
+     <link rel="icon" href="%PUBLIC_URL%/favicon.ico" />
+     <meta name="viewport" content="width=device-width, initial-scale=1" />
+     <meta name="theme-color" content="#000000" />
+     <meta
+       name="description"
+       content="Web site created using create-react-app"
+     />
+     <link rel="apple-touch-icon" href="%PUBLIC_URL%/logo192.png" />
+     <!--
+       manifest.json provides metadata used when your web app is installed on a
+       user's mobile device or desktop. See https://developers.google.com/web/fundamentals/web-app-manifest/
+     -->
+     <link rel="manifest" href="%PUBLIC_URL%/manifest.json" />
+     <!--
+       Notice the use of %PUBLIC_URL% in the tags above.
+       It will be replaced with the URL of the `public` folder during the build.
+       Only files inside the `public` folder can be referenced from the HTML.
+
+       Unlike "/favicon.ico" or "favicon.ico", "%PUBLIC_URL%/favicon.ico" will
+       work correctly both with client-side routing and a non-root public URL.
+       Learn how to configure a non-root public URL by running `npm run build`.
+     -->
+     <title>React App</title>
+   </head>
+   <body>
+     <noscript>You need to enable JavaScript to run this app.</noscript>
+     <div id="root"></div>
+     <!--
+       This HTML file is a template.
+       If you open it directly in the browser, you will see an empty page.
+
+       You can add webfonts, meta tags, or analytics to this file.
+       The build step will place the bundled scripts into the <body> tag.
+
+       To begin the development, run `npm start` or `yarn start`.
+       To create a production bundle, use `npm run build` or `yarn build`.
+     -->
+   </body>
+ </html>
index.js ADDED
@@ -0,0 +1,23 @@
+ import React from 'react';
+ import ReactDOM from 'react-dom/client';
+
+ // Bootstrap CSS
+ import "bootstrap/dist/css/bootstrap.min.css";
+ // Bootstrap Bundle JS
+ import "bootstrap/dist/js/bootstrap.bundle.min";
+
+ import './index.css';
+ import App from './App';
+ import reportWebVitals from './reportWebVitals';
+
+ const root = ReactDOM.createRoot(document.getElementById('root'));
+ root.render(
+   <React.StrictMode>
+     <App />
+   </React.StrictMode>
+ );
+
+ // If you want to start measuring performance in your app, pass a function
+ // to log results (for example: reportWebVitals(console.log))
+ // or send to an analytics endpoint. Learn more: https://bit.ly/CRA-vitals
+ reportWebVitals();
lambda_function.py ADDED
@@ -0,0 +1,101 @@
+ import base64
+ import io
+ import json
+ import os
+ import uuid
+
+ import boto3
+ from PIL import Image
+
+ s3 = boto3.resource('s3', region_name='us-east-1')
+ bucket_name = os.environ['OUT_S3_BUCKET_NAME']
+
+ s3_client = boto3.client('s3', region_name='us-east-1')
+ session = boto3.Session()
+
+
+ def get_bedrock_llm():
+     bedrock_client = session.client(service_name="bedrock-runtime")
+     return bedrock_client
+
+
+ def query_endpoint_bedrock(prompt, style_preset):
+     bedrock_client = get_bedrock_llm()
+
+     negative_prompts = [
+         "poorly rendered",
+         "poor background details",
+         "poorly drawn mountains",
+         "disfigured mountain features",
+     ]
+
+     request = json.dumps({
+         "text_prompts": (
+             [{"text": prompt, "weight": 1.0}]
+             + [{"text": negprompt, "weight": -1.0} for negprompt in negative_prompts]
+         ),
+         "cfg_scale": 5,
+         "seed": 5450,
+         "steps": 70,
+         "style_preset": style_preset,
+     })
+
+     modelId = "stability.stable-diffusion-xl"
+
+     response = bedrock_client.invoke_model(body=request, modelId=modelId)
+
+     return response
+
+
+ def parse_response(query_response):
+     response_body = json.loads(query_response.get("body").read())
+
+     base_64_img_str = response_body["artifacts"][0].get("base64")
+
+     return base_64_img_str
+
+
+ def upload_image(base_64_img_str, prmpt):
+     # Decode the base64 image returned by Bedrock
+     image_data = io.BytesIO(base64.decodebytes(bytes(base_64_img_str, "utf-8")))
+     image = Image.open(image_data)
+
+     # Prepare the image for upload to S3
+     buffered_image = io.BytesIO()
+     image.save(buffered_image, format="PNG")
+
+     image_name = prmpt + str(uuid.uuid4()) + '.png'
+
+     buffered_image.seek(0)
+     s3.Object(bucket_name, image_name).put(
+         Body=buffered_image, ContentType='image/png')
+     return s3_client.generate_presigned_url(ClientMethod='get_object', Params={'Bucket': bucket_name, 'Key': image_name}, ExpiresIn=1000)
+
+
+ def lambda_handler(event, context):
+     print("Received event: " + json.dumps(event, indent=2))
+     data = json.loads(json.dumps(event))
+     text = data['data']
+     style_preset = data['style_preset']
+     response = query_endpoint_bedrock(text, style_preset)
+     base_64_img_str = parse_response(response)
+
+     # Upload the generated image and hand back a presigned URL
+     url = upload_image(base_64_img_str, text)
+
+     return {
+         'statusCode': 200,
+         'headers': {
+             'Access-Control-Allow-Headers': 'Content-Type',
+             'Access-Control-Allow-Origin': '*',
+             'Access-Control-Allow-Methods': 'OPTIONS,POST,GET'
+         },
+         'body': url
+     }
+
+
+ if __name__ == "__main__":
+     # Local smoke test; the guard keeps it from running on a Lambda cold start
+     event = {
+         "data": "Samosa",
+         "style_preset": "photographic"
+     }
+     print(lambda_handler(event, context=None))
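
The handler above expects an event shaped like the test payload (`data` plus `style_preset`) and returns a presigned S3 URL in `body`, which is what App.jsx reads as `response.data.body`. Once the function is wired behind API Gateway, that contract can be exercised with a sketch like this (the endpoint URL is the same placeholder used in App.jsx; substitute your own invoke URL):

```python
import requests

# Placeholder endpoint; replace with your API Gateway invoke URL
api = "https://<API-GW-ENDPOINT>/test/genai"

payload = {"data": "Samosa", "style_preset": "photographic"}
resp = requests.post(api, json=payload, timeout=120)
resp.raise_for_status()

# Mirrors response.data.body in App.jsx: the presigned URL of the generated PNG
print(resp.json()["body"])
```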
linkedin.py ADDED
@@ -0,0 +1,39 @@
+ import os
+ import requests
+
+
+ def scrape_linkedin_profile(linkedin_profile_url):
+     """Scrape information from a LinkedIn profile via the Proxycurl API."""
+     api_endpoint = "https://nubela.co/proxycurl/api/v2/linkedin"
+     header_dic = {"Authorization": f'Bearer {os.environ.get("PROXYCURL_API_KEY")}'}
+
+     # For production
+     response = requests.get(
+         api_endpoint, params={"url": linkedin_profile_url}, headers=header_dic
+     )
+
+     # # For test and development
+     # response = requests.get(
+     #     "https://gist.githubusercontent.com/debnsuma/07afaf3939dcc2b5cc404de58016fdd2/raw/9e4b8f942364f6ee9759d1cdb1b6d7b8078ceb4e/suman.json"
+     # )
+
+     # Drop empty values and bulky sections that are not needed for the summary
+     data = response.json()
+     data = {
+         k: v
+         for k, v in data.items()
+         if v not in ([], "", None)
+         and k
+         not in [
+             "people_also_viewed",
+             "certifications",
+             "accomplishment_publications",
+             "accomplishment_honors_awards",
+             "accomplishment_projects",
+         ]
+     }
+     if data.get("groups"):
+         for group_dict in data.get("groups"):
+             group_dict.pop("profile_pic_url")
+
+     return data
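
For a quick standalone check of this scraper (assumes `PROXYCURL_API_KEY` is set; the profile URL is just an example, and the import path mirrors the one used in app.py):

```python
import json
from third_parties.linkedin import scrape_linkedin_profile

# Any public LinkedIn profile URL works here
data = scrape_linkedin_profile("https://www.linkedin.com/in/suman-d/")
print(json.dumps(data, indent=2)[:500])  # peek at the trimmed payload
```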
linkedin_lookup_agent.py ADDED
@@ -0,0 +1,46 @@
+ from dotenv import load_dotenv
+ from langchain import PromptTemplate
+ from langchain.agents import initialize_agent, Tool, AgentType
+ from langchain.llms.bedrock import Bedrock
+
+ from tools.tools import get_profile_url
+
+
+ def get_llm():
+     bedrock_llm = Bedrock(model_id="anthropic.claude-v2",
+                           model_kwargs={"temperature": 0.1, "max_tokens_to_sample": 4096})
+     return bedrock_llm
+
+
+ def lookup(name: str) -> str:
+     load_dotenv()
+
+     template = """Given the full name {name_of_person}, I want you to get me a link to their LinkedIn profile page.
+     Your answer should contain only the URL of the LinkedIn profile."""
+
+     tools_for_agent = [
+         Tool(
+             name="Crawl Google 4 linkedin profile page",
+             func=get_profile_url,
+             description="useful for when you need to get the LinkedIn page URL",
+         ),
+     ]
+     llm = get_llm()
+     agent_chain = initialize_agent(tools_for_agent, llm, agent=AgentType.ZERO_SHOT_REACT_DESCRIPTION, verbose=True)
+
+     prompt_template = PromptTemplate(
+         input_variables=["name_of_person"], template=template
+     )
+
+     # Get the LLM's output
+     try:
+         linkedin_username = agent_chain.run(handle_parsing_errors=True, input=prompt_template.format_prompt(name_of_person=name))
+     except ValueError as e:
+         print("Error while parsing LLM output:", e)
+         return None
+
+     return linkedin_username
manifest.json ADDED
@@ -0,0 +1,25 @@
+ {
+   "short_name": "React App",
+   "name": "Create React App Sample",
+   "icons": [
+     {
+       "src": "favicon.ico",
+       "sizes": "64x64 32x32 24x24 16x16",
+       "type": "image/x-icon"
+     },
+     {
+       "src": "logo192.png",
+       "type": "image/png",
+       "sizes": "192x192"
+     },
+     {
+       "src": "logo512.png",
+       "type": "image/png",
+       "sizes": "512x512"
+     }
+   ],
+   "start_url": ".",
+   "display": "standalone",
+   "theme_color": "#000000",
+   "background_color": "#ffffff"
+ }
package-lock.json ADDED
The diff for this file is too large to render. See raw diff
 
package.json ADDED
@@ -0,0 +1,44 @@
+ {
+   "name": "mygen-ai-app",
+   "version": "0.1.0",
+   "private": true,
+   "dependencies": {
+     "@testing-library/jest-dom": "^5.17.0",
+     "@testing-library/react": "^13.4.0",
+     "@testing-library/user-event": "^13.5.0",
+     "axios": "^1.5.0",
+     "bootstrap": "^5.3.2",
+     "http-proxy-middleware": "^2.0.6",
+     "react": "^18.2.0",
+     "react-bootstrap": "^2.8.0",
+     "react-dom": "^18.2.0",
+     "react-icons": "^4.11.0",
+     "react-scripts": "5.0.1",
+     "styled-components": "^6.0.8",
+     "web-vitals": "^2.1.4"
+   },
+   "scripts": {
+     "start": "react-scripts start",
+     "build": "react-scripts build",
+     "test": "react-scripts test",
+     "eject": "react-scripts eject"
+   },
+   "eslintConfig": {
+     "extends": [
+       "react-app",
+       "react-app/jest"
+     ]
+   },
+   "browserslist": {
+     "production": [
+       ">0.2%",
+       "not dead",
+       "not op_mini all"
+     ],
+     "development": [
+       "last 1 chrome version",
+       "last 1 firefox version",
+       "last 1 safari version"
+     ]
+   }
+ }
preview.png ADDED
rec.gif ADDED

Git LFS Details

  • SHA256: 8dd7d40de84d279f64e8781915c97eb67c7a07afc2f21a0e15f24becd7b92070
  • Pointer size: 133 Bytes
  • Size of remote file: 25.7 MB
reportWebVitals.js ADDED
@@ -0,0 +1,13 @@
+ const reportWebVitals = onPerfEntry => {
+   if (onPerfEntry && onPerfEntry instanceof Function) {
+     import('web-vitals').then(({ getCLS, getFID, getFCP, getLCP, getTTFB }) => {
+       getCLS(onPerfEntry);
+       getFID(onPerfEntry);
+       getFCP(onPerfEntry);
+       getLCP(onPerfEntry);
+       getTTFB(onPerfEntry);
+     });
+   }
+ };
+
+ export default reportWebVitals;
requirements.txt ADDED
@@ -0,0 +1,11 @@
+ streamlit==1.24.0
+ tiktoken==0.5.1
+ unstructured==0.10.16
+ langchain
+ PyPDF2==3.0.1
+ python-dotenv==1.0.0
+ psycopg2-binary==2.9.6
+ altair==4.0.0
+ urllib3==1.26.17
+ boto3==1.28.61
+ google-search-results==2.4.2
robots.txt ADDED
@@ -0,0 +1,3 @@
+ # https://www.robotstxt.org/robotstxt.html
+ User-agent: *
+ Disallow:
sample_job_description.txt ADDED
@@ -0,0 +1,16 @@
+ DESCRIPTION
+ The Amazon Maps Data (AMD) team curates geospatial map data that represents the physical world at a fidelity that enables safe and efficient logistics for last mile operations at Amazon. This data powers Amazon Maps, used in more than 10 million trips and 15 million kilometers of driving by last mile Amazon drivers every day.
+ The team builds geospatial big data pipelines that ingest raw map data from multiple heterogeneous sources and fuse them into one single map using machine learning models and graph algorithms. The machine learning models range from regression models that map-match map elements to deep learning models that align map features consumed from different sources.
+ Artifacts generated from this data are used in (i) shortest path computations for planning of delivery routes, (ii) providing turn-by-turn navigation instructions to drivers, (iii) displaying the map visuals on the Amazon Flex app used by last mile drivers, and (iv) planning delivery sectors within the jurisdiction of an Amazon delivery station. Besides extensive usage of AWS cloud services, the team builds software using bleeding edge big data technologies like GeoSpark and GraphX.
+
+ We are open to hiring candidates to work out of one of the following locations: Bellevue, WA, USA
+
+ BASIC QUALIFICATIONS
+ - 3+ years of non-internship professional software development experience
+ - 2+ years of non-internship design or architecture (design patterns, reliability and scaling) of new and existing systems experience
+ - Experience programming with at least one software programming language
+ - 3+ years of full software development life cycle, including coding standards, code reviews, source control management, build processes, testing, and operations experience
+
+ PREFERRED QUALIFICATIONS
+ - Bachelor's degree in computer science or equivalent
+ - Experience in machine learning, data mining, information retrieval, statistics or natural language processing
setupProxy.js ADDED
@@ -0,0 +1,9 @@
+ const { createProxyMiddleware } = require('http-proxy-middleware');
+
+ const proxy = {
+   target: 'https://q45f9ormi0.execute-api.us-east-1.amazonaws.com/',
+   changeOrigin: true,
+ };
+
+ module.exports = function (app) {
+   app.use('/test', createProxyMiddleware(proxy));
+ };
setupTests.js ADDED
@@ -0,0 +1,5 @@
+ // jest-dom adds custom jest matchers for asserting on DOM nodes.
+ // allows you to do things like:
+ // expect(element).toHaveTextContent(/react/i)
+ // learn more: https://github.com/testing-library/jest-dom
+ import '@testing-library/jest-dom';
suman.json ADDED
@@ -0,0 +1,486 @@
+ {
+   "public_identifier": "suman-d",
+   "profile_pic_url": "https://s3.us-west-000.backblazeb2.com/proxycurl/person/suman-d/profile?X-Amz-Algorithm=AWS4-HMAC-SHA256&X-Amz-Credential=0004d7f56a0400b0000000001%2F20230825%2Fus-west-000%2Fs3%2Faws4_request&X-Amz-Date=20230825T162928Z&X-Amz-Expires=3600&X-Amz-SignedHeaders=host&X-Amz-Signature=d0a44cdf5238f916130cf62aaf4a6e70c39aa0eb4b2030d6eed2c4fd0e57562d",
+   "background_cover_image_url": null,
+   "first_name": "Suman",
+   "last_name": "Debnath ",
+   "full_name": "Suman Debnath",
+   "follower_count": 9616,
+   "occupation": "Principal Developer Advocate, Data Engineering and Analytics at Amazon Web Services (AWS)",
+   "headline": "Transforming lazily, Data Engineering @ Amazon Web Services",
+   "summary": "\\u2022\\tPreviously worked as Technical Lead focusing on QA, Automation, Performance and Tool Development efforts towards the NVMe Over Fabrics engineering solution.\\n\\u2022\\tWorked as a Staff Engineer for Broadcom with the Performance Benchmarking and Technical Marketing group\\n\\u2022\\tCompTIA Storage+ SME, responsible to develop the question set for the SNIA Certified Storage Professional certification examination. \\n\\u2022\\tStorage Professional, mostly focusing on Performance Engineering, System Testing, Quality Engineering and Solutioning. Worked with clients to evaluate technologies. Conduct Assessments/ Workshops/PoC, provide solution architecture\\n\\u2022\\tStorage and system performance engineer, focusing on raw and application based performance benchmarking (like Oracle, MS SQL Server, MS Exchange Server, Storage Migration, Bootstrom, etc.) \\n\\u2022\\tStorage QE professional, expertise on system, regression and performance testing for various storage systems, especially on distributed storage systems, like IBM SVC and EMC VPLEX\\n\\u2022\\tLeveraged open source tools, like Vagrant, Ansible and Docker to improve the test efficiency \\n\\u2022\\tExpertise in delivering technical training & workshops (EMC VPLEX, IBM SVC, IBM V7000 Storwize, Customized Workshop)\\n\\u2022\\tAuthor of more than 30 white papers, published for Broadcom, IBM and Hitachi Data Systems\\n\\u2022\\tFrequent speaker at various national and international conference like, SNIA, GHCI and PyConf \\n\\u2022\\tDeveloped Performance Monitoring tool for Brocade FC switches \\n\\u2022\\tDeveloped Tool for Measuring VM Bootstorm performance benchmarking \\n\\u2022\\tGithub - https://github.com/suman-d",
+   "country": "US",
+   "country_full_name": "United States of America",
+   "city": "Boston",
+   "state": "Massachusetts",
+   "experiences": [{
+     "starts_at": {
+       "day": 1,
+       "month": 7,
+       "year": 2022
+     },
+     "ends_at": null,
+     "company": "Amazon Web Services (AWS)",
+     "company_linkedin_profile_url": "https://www.linkedin.com/company/amazon-web-services/",
+     "title": "Principal Developer Advocate, Data Engineering and Analytics",
+     "description": null,
+     "location": "Boston, Massachusetts, United States",
+     "logo_url": "https://media.licdn.com/dms/image/C560BAQER_QnUTXrPJw/company-logo_400_400/0/1670264050886?e=1700697600&v=beta&t=DicqTgSOApjcw6M2A27i8tCXwAo3s8o37qQro1v859s"
+   }, {
+     "starts_at": {
+       "day": 1,
+       "month": 1,
+       "year": 2022
+     },
+     "ends_at": {
+       "day": 1,
+       "month": 7,
+       "year": 2022
+     },
+     "company": "Amazon Web Services (AWS)",
+     "company_linkedin_profile_url": "https://www.linkedin.com/company/amazon-web-services/",
+     "title": "Principal, Amazon Elastic File System (Amazon EFS) ",
+     "description": null,
+     "location": "Boston, Massachusetts, United States",
+     "logo_url": "https://media.licdn.com/dms/image/C560BAQER_QnUTXrPJw/company-logo_400_400/0/1670264050886?e=1700697600&v=beta&t=DicqTgSOApjcw6M2A27i8tCXwAo3s8o37qQro1v859s"
+   }, {
+     "starts_at": {
+       "day": 1,
+       "month": 7,
+       "year": 2019
+     },
+     "ends_at": {
+       "day": 1,
+       "month": 12,
+       "year": 2021
+     },
+     "company": "Amazon Web Services (AWS)",
+     "company_linkedin_profile_url": "https://www.linkedin.com/company/amazon-web-services/",
+     "title": "Principal Developer Advocate",
+     "description": "Worked with the Global Developer Relations Team, focusing on core services, serverless and machine learning.\\n\\u2022\\tHosted 120+ third party conference and meetups in last 2 years focusing on various technology stack around Serverless, Machine Learning and Data Analysis. \\n\\u2022\\tPresented at Tier 1 AWS events like re:Invent, AWS Innovate, AWS Modern Application Development, Amazon AI Innovate, AWS DevDay, etc. Hosted \\u201cAWS on Air\\u201d show at re:Invent. \\n\\u2022\\tDeveloped developer focused content around Machine Learning and Serverless. \\n\\u2022\\tScaled the number of AWS User Group from 17 to 28 within two year in the region(India/SL/Bangladesh)",
+     "location": "Bangalore Urban, Karnataka, India",
+     "logo_url": "https://media.licdn.com/dms/image/C560BAQER_QnUTXrPJw/company-logo_400_400/0/1670264050886?e=1700697600&v=beta&t=DicqTgSOApjcw6M2A27i8tCXwAo3s8o37qQro1v859s"
+   }, {
+     "starts_at": {
+       "day": 1,
+       "month": 5,
+       "year": 2017
+     },
+     "ends_at": {
+       "day": 1,
+       "month": 1,
+       "year": 2019
+     },
+     "company": "Toshiba America Electronic Components, Inc.",
+     "company_linkedin_profile_url": "https://www.linkedin.com/company/toshiba-america-electronic-components-inc./",
+     "title": "Project Leader",
+     "description": "Lead the Performance QA, Quality Engineering efforts for Toshiba NVMe over Fabric(KumoScale) Engineering solution \\n\\u2022\\tMentor new hire and train the team with the technology and product \\n\\u2022\\tDeveloped performance regression testing suite and reporting dashboard\\n\\u2022\\tDeveloped automated test suit for REST API \\n\\u2022\\tPerformance benchmarking for various data center application (stand-alone and container based)\\n\\u2022\\tPerforming POC and Demos on NVMeoF solution and Docker/Kubernetes\\n\\u2022\\tParticipating as a speaker at various conferences like Flash Memory Summit, PyCon, SNIA, OpenSource for you, etc. \\n\\u2022\\tDevelop technical content wiki, white-papers and solution briefs",
+     "location": "Pune, Maharashtra, India",
+     "logo_url": "https://media.licdn.com/dms/image/C4E0BAQGHe97KBeyd5g/company-logo_400_400/0/1536870926328?e=1700697600&v=beta&t=UfC9aZxHjQ-1bWbA5gsp7WKxwmHwxSp4F7GO3yNfJlc"
+   }, {
+     "starts_at": {
+       "day": 1,
+       "month": 1,
+       "year": 2016
+     },
+     "ends_at": {
+       "day": 1,
+       "month": 4,
+       "year": 2017
+     },
+     "company": "Broadcom Inc.",
+     "company_linkedin_profile_url": "https://www.linkedin.com/company/broadcom/",
+     "title": "Principal Performance Engineer",
+     "description": "Worked with the Performance Engineering and Technical Marketing Group (Emulex Business Unit) focusing on FC HBA performance\\ncharacterization, PoC and benchmarking with various Flash Storage Systems.\\n\\u2022\\tIndustry Performance Benchmarking \\u2013 TPC-H, TPC-C and SPECsfs\\n\\u2022\\tPerformance characterization of various workload for 8Gb, 16Gb and 32Gb FC cards\\n\\u2022\\tBenchmarking for FC card\\u2019s QoS(Quality of Service)\\n\\u2022\\tWriting blogs/technical performance brief papers for Engineering, Support and Product Marketing\\n\\u2022\\tPerformance Benchmarking for SQL Server 2016 database with FC HBAs\\n\\u2022\\tPerformance Benchmarking for Storage Migration(HyperV, ESX, and Citrix) over FC for the Gen6 32Gbps HBA adapters\\n\\u2022\\tBenchmarking/PoC for various flash arrays from different vendors, like, Dell, EMC, Violin, XtremIO, Kaminario, Pure, Nimble, etc.",
+     "location": "Bangalore Urban, Karnataka, India",
+     "logo_url": "https://media.licdn.com/dms/image/C4E0BAQGDwy-AGAu-Hg/company-logo_400_400/0/1519856412341?e=1700697600&v=beta&t=NaJA5QQY4GcmEcPgIcDyZ3hf8HLoQg2pcW2cuMEjxgU"
+   }, {
+     "starts_at": {
+       "day": 1,
+       "month": 2,
+       "year": 2015
+     },
+     "ends_at": {
+       "day": 1,
+       "month": 1,
+       "year": 2016
+     },
+     "company": "NetApp",
+     "company_linkedin_profile_url": "https://www.linkedin.com/company/netapp/",
+     "title": "Member of Technical Staff",
+     "description": "Worked with the Performance Products Group (PPG) focusing on validation and verification of various NetApp products.\\n\\u2022\\tValidating Service Processor security aspects for encryption(FIPS Qualification)\\n\\u2022\\tPerformance Analysis for various workload, OLTP, DSS, etc. \\n\\u2022\\tPerformance Regression test design\\n\\u2022\\tWork with the Information Engineering team for field documents",
+     "location": "Bangalore Urban, Karnataka, India",
+     "logo_url": "https://media.licdn.com/dms/image/C560BAQH78XSLHqxMOQ/company-logo_400_400/0/1599748887845?e=1700697600&v=beta&t=9l-RqO7O2KbdLwfkVSArft0uHoxNhewRRi3C90_NR7E"
+   }, {
+     "starts_at": {
+       "day": 1,
+       "month": 1,
+       "year": 2013
+     },
+     "ends_at": {
+       "day": 1,
+       "month": 12,
+       "year": 2014
+     },
+     "company": "EMC",
+     "company_linkedin_profile_url": "https://www.linkedin.com/company/dellemc/",
+     "title": "Senior Software Engineer",
+     "description": "Worked for EMC Private Cloud workforce with VPLEX Engineering.\\n\\u2022\\tSystem testing for features, failure, DR for VPLEX \\n\\u2022\\tDesign, development and writing of system test case for core features and regression.\\n\\u2022\\tCustomer Demos showcasing HA and Continuous Availability with VPLEX\\n\\u2022\\tDrive scoping of overall test effort for various new functionalities in a release and assist functional team leads for test scoping in their individual areas\\n\\u2022\\tProvide technical whiteboard sessions to engineering team on critical functionalities in the product\\n\\u2022\\tAdhere to SCRUM process during the product development phase\\n\\u2022\\tFocusing on IO Performance Regression efforts for VPLEX for different releases\\n\\u2022\\tRegression bisection and performance defect triage\\n\\u2022\\tBenchmark and performance characterization of different VPLEX configurations",
+     "location": "Bangalore Urban, Karnataka, India",
+     "logo_url": "https://media.licdn.com/dms/image/C510BAQErubCcqX1TpQ/company-logo_400_400/0/1557939499088?e=1700697600&v=beta&t=Xg1IsjPqVYofRGXdFdP-rE2qMtSOikmRcu-wjcPjx1I"
+   }, {
+     "starts_at": {
+       "day": 1,
+       "month": 1,
+       "year": 2011
+     },
+     "ends_at": {
+       "day": 1,
+       "month": 1,
+       "year": 2013
+     },
+     "company": "IBM",
+     "company_linkedin_profile_url": "https://www.linkedin.com/company/ibm/",
+     "title": "Storage Solution Architect",
+     "description": "Worked with the Storage Lab Services, India System and Technology Lab (ISTL) which comes under the umbrella of India Software Lab (ISL), IBM\\n\\n\\u2022\\tDesigning and testing of various IBM System Virtualization Storage solution for arrays, like SVC and V7000.\\n\\u2022\\tDesigning of complex SAN environment for various clients and providing them training on various IBM System Storage products, upcoming technologies, certifications, etc.\\n\\u2022\\tDesigning and testing of Disaster Recovery Solution with various Replication Technologies\\n\\u2022\\tBandwidth Calculation Tool Development of Disaster Recovery solutions\\n\\u2022\\tPerformance Assessment of Storage Environment and Storage Health Check Assessment\\n\\u2022\\tMultiple projects handled for System and Technology Group (STG), GTS (including SO), GBS ranging from architecture, assessments, performance engineering, Offering development, Technical Project Management and customized STG trainings.\\n\\u2022\\tCollaborate with all Services teams (GTS, GBS, SWG Lab Services and our IBM Business Partners) in the optimal deployment of IBM\\u00ae STG solutions\\n\\u2022\\tConducting training on upcoming technologies, IBM System Storage, IBM Certifications, etc. to different clients and business partners. \\n\\u2022\\tDefine and develop Document of Understand(DoU) and Scope of Work(SoW) before the start of any engagement",
+     "location": "Bangalore Urban, Karnataka, India",
+     "logo_url": "https://media.licdn.com/dms/image/D560BAQGiz5ecgpCtkA/company-logo_400_400/0/1688684715427?e=1700697600&v=beta&t=u5Re06Myc9BstE2M6qN9MMORKODl8XmTlCBTPMoVtXk"
+   }, {
+     "starts_at": {
+       "day": 1,
+       "month": 5,
+       "year": 2008
+     },
+     "ends_at": {
+       "day": 1,
+       "month": 1,
+       "year": 2011
+     },
+     "company": "Hitachi Data Systems",
+     "company_linkedin_profile_url": "https://www.linkedin.com/company/hitachi-vantara/",
+     "title": "Performance Consultant",
+     "description": "Worked for HITACHI Data Systems (HDS) with the Global Solution Strategy and Development Team as a Performance Consultant\\n\\n\\u2022\\tBenchmark raw/application performance testing on HDS Modular Storage\\n\\u2022\\tAuthor of 20+ HDS performance best practice and white paper (ESRP-Exchange 2007/2010) for HDS Sales.\\n\\u2022\\tTrained Microsoft Exchange Rangers and designed 10+ Exchange Storage Solutions. \\n\\u2022\\tReceived Hitachi Data Systems Hat\\u2019s Off Award in April\\u20192010.\\n\\u2022\\tFulltime SNIA Corporate Member(Vendor Large Voting Membership)",
+     "location": "Kolkata, West Bengal, India",
+     "logo_url": "https://media.licdn.com/dms/image/D560BAQFrBPM24o_Ruw/company-logo_400_400/0/1688414684342?e=1700697600&v=beta&t=iJrkUek9qMKrbI_eM2vbwhV-4IrPICq9bnXs_QlWj70"
+   }],
+   "education": [{
+     "starts_at": {
+       "day": 1,
+       "month": 1,
+       "year": 2023
+     },
+     "ends_at": {
+       "day": 1,
+       "month": 1,
+       "year": 2026
+     },
+     "field_of_study": "Data Science",
+     "degree_name": "Master of Technology - MTech",
+     "school": "Indian Institute of Technology, Hyderabad",
+     "school_linkedin_profile_url": null,
+     "description": null,
+     "logo_url": "https://media.licdn.com/dms/image/C4D0BAQG6UPyQQfOPnA/company-logo_400_400/0/1671364855002?e=1700697600&v=beta&t=B5C17cb-dhoFwswXeHlVcgM5pKrlkZLiv7ReqZONl5E",
+     "grade": null,
+     "activities_and_societies": null
+   }, {
+     "starts_at": {
+       "day": 1,
+       "month": 1,
+       "year": 2003
+     },
+     "ends_at": {
+       "day": 1,
+       "month": 1,
+       "year": 2007
+     },
+     "field_of_study": "Electronics and Communication Engineering",
+     "degree_name": "Bachelor's degree",
+     "school": "West Bengal University of Technology, Kolkata",
+     "school_linkedin_profile_url": null,
+     "description": null,
+     "logo_url": "https://media.licdn.com/dms/image/C560BAQEmEH6AAomm9Q/company-logo_400_400/0/1557680992185?e=1700697600&v=beta&t=cBbixDGUPM67tTR8vS7Vzv0eWhngf0REoXqmyqZ0D38",
+     "grade": null,
+     "activities_and_societies": null
+   }, {
+     "starts_at": {
+       "day": 1,
+       "month": 1,
+       "year": 1991
+     },
+     "ends_at": {
+       "day": 1,
+       "month": 1,
+       "year": 2003
+     },
+     "field_of_study": null,
+     "degree_name": null,
+     "school": "Kendriya Vidyalaya",
+     "school_linkedin_profile_url": null,
+     "description": null,
+     "logo_url": "https://media.licdn.com/dms/image/C4E0BAQFiB3Otc6s1Mw/company-logo_400_400/0/1519897001940?e=1700697600&v=beta&t=FKWNOS94gsOi8q82DmC2tJxm3e8JWCYSDnMxT476fuc",
+     "grade": null,
+     "activities_and_societies": null
+   }],
+   "languages": ["Bengali", "English", "Hindi"],
+   "accomplishment_organisations": [],
+   "accomplishment_publications": [{
+     "name": "Python Workshop @ City Engineering College, Bangalore",
+     "publisher": "Python Express",
+     "published_on": {
+       "day": 1,
+       "month": 1,
+       "year": 2016
+     },
+     "description": null,
+     "url": "https://pythonexpress.in/"
+   }, {
+     "name": "Python Workshop @ KLE S Nijalingappa College , Bengaluru ",
+     "publisher": "Python Express",
+     "published_on": {
+       "day": 1,
+       "month": 1,
+       "year": 2016
+     },
+     "description": null,
+     "url": "https://pythonexpress.in/"
+   }, {
+     "name": "Python Workshop @ MVIT, Bangalore",
+     "publisher": "Python Express",
+     "published_on": {
+       "day": 1,
+       "month": 1,
+       "year": 2016
+     },
+     "description": null,
+     "url": "https://pythonexpress.in/"
+   }, {
+     "name": "Storage, System and Infrastructure deployment automation using Open Source DevOps Tools (Ansible and Vagrant)",
+     "publisher": "PyDelhi",
+     "published_on": {
+       "day": 1,
+       "month": 1,
+       "year": 2016
+     },
+     "description": null,
+     "url": "https://www.youtube.com/watch?v=Epmp9uOsUYA"
+   }, {
+     "name": "Designing of LBGM on IBM Storwize V7000 & Performance Monitoring",
+     "publisher": "IBM DevelopmentWorks",
+     "published_on": null,
+     "description": null,
+     "url": "https://www.ibm.com/developerworks/mydeveloperworks/blogs/869bac74-5fc2-4b94-81a2-6153890e029a/entry/february_16_2012_3_54_am9?lang=en"
+   }, {
+     "name": "HDS Exchange Solution Whitepapers",
+     "publisher": "HDS and Microsoft",
+     "published_on": null,
+     "description": null,
+     "url": "http://technet.microsoft.com/en-us/exchange/ff182054.aspx"
+   }],
+   "accomplishment_honors_awards": [{
+     "title": "Broadcom Spot Award",
+     "issuer": "Broadcom",
+     "issued_on": {
+       "day": 1,
+       "month": 11,
+       "year": 2016
+     },
+     "description": null
+   }, {
+     "title": "NetApp Champion's Award ",
+     "issuer": "NetApp",
+     "issued_on": {
+       "day": 1,
+       "month": 10,
+       "year": 2015
+     },
+     "description": null
+   }, {
+     "title": "EMC Quarterly Award",
+     "issuer": "EMC Corporation",
+     "issued_on": {
+       "day": 1,
+       "month": 1,
+       "year": 2014
+     },
+     "description": null
+   }, {
+     "title": "EMC Quarterly Award",
+     "issuer": "EMC Corporation",
+     "issued_on": {
+       "day": 1,
+       "month": 1,
+       "year": 2013
+     },
+     "description": null
+   }, {
+     "title": "IBM Architect Award",
+     "issuer": "IBM",
+     "issued_on": {
+       "day": 1,
+       "month": 1,
+       "year": 2012
+     },
+     "description": null
+   }, {
+     "title": "HDS Hat's Off Award",
+     "issuer": "Hitachi Data Systems",
+     "issued_on": {
+       "day": 1,
+       "month": 1,
+       "year": 2010
+     },
+     "description": null
+   }],
+   "accomplishment_patents": [],
+   "accomplishment_courses": [],
+   "accomplishment_projects": [{
+     "starts_at": {
+       "day": 1,
+       "month": 11,
+       "year": 2014
+     },
+     "ends_at": {
+       "day": 1,
+       "month": 11,
+       "year": 2014
+     },
+     "title": "CompTIA Storage+ Powered by SNIA Storage Exam Development",
+     "description": "The team participated in the development of the Storage+ certification exam. Members developed questions for exam question bank; reviewed and revised existing questions from the bank, and provided input to CompTIA/SNIA regarding the overall content of the exam.",
+     "url": null
+   }],
+   "accomplishment_test_scores": [],
+   "volunteer_work": [],
+   "certifications": [{
+     "starts_at": {
+       "day": 1,
+       "month": 12,
+       "year": 2022
+     },
+     "ends_at": null,
+     "name": "AWS Certified Data Analytics - Specialty",
+     "license_number": "K6L962KKQ1BQ11KS",
+     "display_source": "credly.com",
+     "authority": "Amazon Web Services (AWS)",
+     "url": "https://www.credly.com/badges/7e72ca27-5134-43df-ae86-1e98631b248a/public_url"
+   }, {
+     "starts_at": {
+       "day": 1,
+       "month": 11,
+       "year": 2020
+     },
+     "ends_at": null,
+     "name": "AWS Certified Machine Learning \\u2013 Specialty",
+     "license_number": "7XYRPK4LC2Q11QW7",
+     "display_source": null,
+     "authority": "Amazon Web Services (AWS)",
+     "url": null
+   }, {
+     "starts_at": {
+       "day": 1,
+       "month": 11,
+       "year": 2019
+     },
+     "ends_at": null,
+     "name": "AWS Certified Developer -Associate",
+     "license_number": "AWS01010287",
+     "display_source": null,
+     "authority": "Amazon Web Services (AWS)",
+     "url": null
+   }, {
+     "starts_at": {
+       "day": 1,
+       "month": 11,
+       "year": 2019
+     },
+     "ends_at": null,
+     "name": "AWS Certified Solutions Architect - Associate",
+     "license_number": "AWS-ASA-18864",
+     "display_source": null,
+     "authority": "Amazon Web Services (AWS)",
+     "url": null
+   }, {
+     "starts_at": null,
+     "ends_at": null,
+     "name": "EMC Certified Information Storage Associate",
+     "license_number": "GV5D7YL5EJEQYR5S",
+     "display_source": null,
+     "authority": "Dell EMC",
+     "url": null
+   }, {
+     "starts_at": null,
+     "ends_at": null,
+     "name": "EMC Certified VPLEX Specialist Engineer",
+     "license_number": "QPSJZM8QM11427T1",
+     "display_source": null,
+     "authority": "Dell EMC",
+     "url": null
+   }, {
+     "starts_at": null,
+     "ends_at": null,
+     "name": "IBM Certified Mid-range Storage Professional",
+     "license_number": null,
+     "display_source": null,
+     "authority": "IBM",
+     "url": null
+   }, {
+     "starts_at": null,
+     "ends_at": null,
+     "name": "SNIA Certified Storage Architect",
+     "license_number": null,
+     "display_source": null,
+     "authority": "SNIA",
+     "url": null
+   }, {
+     "starts_at": null,
+     "ends_at": null,
+     "name": "SNIA Certified Storage Professional",
+     "license_number": null,
+     "display_source": null,
+     "authority": "SNIA",
+     "url": null
+   }, {
+     "starts_at": null,
+     "ends_at": null,
+     "name": "VMware Certified Associate(VCA) - Data Center Virtualization",
+     "license_number": "VMW-01170564Y-00382439",
+     "display_source": null,
+     "authority": "VMware",
+     "url": null
+   }],
+   "connections": null,
+   "people_also_viewed": [],
+   "recommendations": [],
+   "activities": [],
+   "similarly_named_profiles": [{
+     "name": "Suman Debnath",
+     "link": "https://in.linkedin.com/in/suman-debnath-28962a9a",
+     "summary": "tbm at Ranbaxy",
+     "location": "Assam, India"
+   }, {
+     "name": "Suman Debnath",
+     "link": "https://in.linkedin.com/in/suman-debnath-0b29a4b5",
+     "summary": "Certified SuccessFactors LMS consultant (by Profession) and A Learner by Nature. Helping my clients with their Learning compliance need.",
+     "location": "Kolkata"
+   }, {
+     "name": "Suman Debnath",
+     "link": "https://in.linkedin.com/in/sumandebnath100",
+     "summary": "Software Engineer at PayPal",
+     "location": "Chennai"
+   }, {
+     "name": "Suman debnath",
+     "link": "https://de.linkedin.com/in/suman-debnath-0524735b",
+     "summary": "Restaurant Manager at Athidhi Indian Restaurant",
+     "location": "Wiesloch"
+   }],
+   "articles": [],
+   "groups": [],
+   "skills": [],
+   "inferred_salary": null,
+   "gender": null,
+   "birth_date": null,
+   "industry": null,
+   "extra": null,
+   "interests": [],
+   "personal_emails": [],
+   "personal_numbers": []
+ }
tools.py ADDED
@@ -0,0 +1,44 @@
+ from langchain.utilities import SerpAPIWrapper
+
+
+ class CustomSerpAPIWrapper(SerpAPIWrapper):
+     def __init__(self):
+         super(CustomSerpAPIWrapper, self).__init__()
+
+     @staticmethod
+     def _process_response(res: dict) -> str:
+         """Process response from SerpAPI."""
+         if "error" in res.keys():
+             raise ValueError(f"Got error from SerpAPI: {res['error']}")
+         if "answer_box" in res.keys() and "answer" in res["answer_box"].keys():
+             toret = res["answer_box"]["answer"]
+         elif "answer_box" in res.keys() and "snippet" in res["answer_box"].keys():
+             toret = res["answer_box"]["snippet"]
+         elif (
+             "answer_box" in res.keys()
+             and "snippet_highlighted_words" in res["answer_box"].keys()
+         ):
+             toret = res["answer_box"]["snippet_highlighted_words"][0]
+         elif (
+             "sports_results" in res.keys()
+             and "game_spotlight" in res["sports_results"].keys()
+         ):
+             toret = res["sports_results"]["game_spotlight"]
+         elif (
+             "knowledge_graph" in res.keys()
+             and "description" in res["knowledge_graph"].keys()
+         ):
+             toret = res["knowledge_graph"]["description"]
+         elif "snippet" in res["organic_results"][0].keys():
+             # Customization: return the first organic result's link (the profile URL)
+             # rather than its snippet text
+             toret = res["organic_results"][0]["link"]
+         else:
+             toret = "No good search result found"
+         return toret
+
+
+ def get_profile_url(name: str):
+     """Searches for a LinkedIn or Twitter profile page."""
+     search = CustomSerpAPIWrapper()
+     res = search.run(f"{name}")
+     return res