Dataset schema:

  text        string    length 6 to 947k
  repo_name   string    length 5 to 100
  path        string    length 4 to 231
  language    string    1 distinct value
  license     string    15 distinct values
  size        int64     6 to 947k
  score       float64   0 to 0.34
import argparse
import sys
import traceback as tb
from datetime import datetime

from cfme.utils.path import log_path
from cfme.utils.providers import list_provider_keys, get_mgmt


def parse_cmd_line():
    parser = argparse.ArgumentParser(argument_default=None)
    parser.add_argument('--nic-template',
                        help='NIC Name template to be removed',
                        default="test", type=str)
    parser.add_argument('--pip-template',
                        help='PIP Name template to be removed',
                        default="test", type=str)
    parser.add_argument('--days-old',
                        help='--days-old argument to find stack items older than X days ',
                        default="7", type=int)
    parser.add_argument("--output", dest="output",
                        help="target file name, default 'cleanup_azure.log' "
                             "in utils.path.log_path",
                        default=log_path.join('cleanup_azure.log').strpath)
    args = parser.parse_args()
    return args


def azure_cleanup(nic_template, pip_template, days_old, output):
    with open(output, 'w') as report:
        report.write('azure_cleanup.py, NICs, PIPs and Stack Cleanup')
        report.write("\nDate: {}\n".format(datetime.now()))
        try:
            for provider_key in list_provider_keys('azure'):
                provider_mgmt = get_mgmt(provider_key)
                nic_list = provider_mgmt.list_free_nics(nic_template)
                report.write("----- Provider: {} -----\n".format(provider_key))
                if nic_list:
                    report.write("Removing Nics with the name \'{}\':\n".format(nic_template))
                    report.write("\n".join(str(k) for k in nic_list))
                    report.write("\n")
                    provider_mgmt.remove_nics_by_search(nic_template)
                else:
                    report.write("No \'{}\' NICs were found\n".format(nic_template))

                pip_list = provider_mgmt.list_free_pip(pip_template)
                if pip_list:
                    report.write("Removing Public IPs with the name \'{}\':\n".format(pip_template))
                    report.write("\n".join(str(k) for k in pip_list))
                    report.write("\n")
                    provider_mgmt.remove_pips_by_search(pip_template)
                else:
                    report.write("No \'{}\' Public IPs were found\n".format(pip_template))

                stack_list = provider_mgmt.list_stack(days_old=days_old)
                if stack_list:
                    report.write("Removing empty Stacks:\n")
                    for stack in stack_list:
                        if provider_mgmt.is_stack_empty(stack):
                            provider_mgmt.delete_stack(stack)
                            report.write("Stack {} is empty - Removed\n".format(stack))
                else:
                    report.write("No stacks older than \'{}\' days were found\n".format(days_old))
            return 0
        except Exception:
            report.write("Something bad happened during Azure cleanup\n")
            report.write(tb.format_exc())
            return 1


if __name__ == "__main__":
    args = parse_cmd_line()
    sys.exit(azure_cleanup(args.nic_template, args.pip_template,
                           args.days_old, args.output))
jkandasa/integration_tests
scripts/azure_cleanup.py
Python
gpl-2.0
3,453
0.003186
# Generated by Django 3.2.4 on 2021-07-05 13:56

from django.db import migrations


class Migration(migrations.Migration):

    dependencies = [
        ("bibliography", "0002_move_json_data"),
    ]

    operations = [
        migrations.AlterModelOptions(
            name="entry",
            options={"verbose_name_plural": "Entries"},
        ),
    ]
fiduswriter/fiduswriter
fiduswriter/bibliography/migrations/0003_alter_entry_options.py
Python
agpl-3.0
357
0
# Parsing program to sort through Investopedia
import urllib2
import re


# This is the code to parse the List of Terms
def get_glossary(res_num):
    html_lowered = res_num.lower()
    begin = html_lowered.find('<!-- .alphabet -->')
    end = html_lowered.find('<!-- .idx-1 -->')
    if begin == -1 or end == -1:
        return None
    else:
        return res_num[begin + len('<!-- .alphabet -->'):end].strip()


# This is the code to parse the Title
def get_title(res_num):
    html_lowered = res_num.lower()
    begin = html_lowered.find('<title>')
    end = html_lowered.find('</title>')
    if begin == -1 or end == -1:
        return None
    else:
        return res_num[begin + len('<title>'):end].strip()


# We start with the numbers section of Investopedia
url = "http://www.investopedia.com/terms/1/"
res_num = ""
for line in urllib2.urlopen(url):
    res_num += line

title_num = get_title(res_num)
glossary_num = get_glossary(res_num)

# Find all hyperlinks in list then eliminate duplicates
glossary_parsed_num = re.findall(r'href=[\'"]?([^\'" >]+)', glossary_num)
glossary_parsed_num = list(set(glossary_parsed_num))

parent_url = 'http://www.investopedia.com'
tail = ' Definition | Investopedia'
short_tail = ' | Investopedia'

print title_num

gp_list = []
for x in glossary_parsed_num:
    gpn = parent_url + x
    res_num = ""
    for line in urllib2.urlopen(gpn):
        res_num += line
    gpn_title = get_title(res_num)
    gpn_penult = gpn_title.replace(tail, '')
    gpn_final = gpn_penult.replace(short_tail, '')
    gp_list.append(gpn_final)

# The alphabet section of Investopedia terms begins here
alfa = [chr(i) for i in xrange(ord('a'), ord('z') + 1)]
for i, v in enumerate(alfa):
    u = 'http://www.investopedia.com/terms/'
    w = '/'
    invest_alfa_url = u + v + w
    # get url info
    res_alfa = ""
    for line in urllib2.urlopen(invest_alfa_url):
        res_alfa += line
    glossary_alfa = get_glossary(res_alfa)
    title_alfa = get_title(res_alfa)
    glossary_parsed_alfa = re.findall(r'href=[\'"]?([^\'" >]+)', glossary_alfa)
    glossary_parsed_alfa = list(set(glossary_parsed_alfa))
    print title_alfa
    for x in glossary_parsed_alfa:
        gpa = parent_url + x
        res_num = ""
        for line in urllib2.urlopen(gpa):
            res_num += line
        gpa_title = get_title(res_num)
        gpa_penult = gpa_title.replace(tail, '')
        gpa_final = gpa_penult.replace(short_tail, '')
        gp_list.append(gpa_final)

# Write the new list to the file
with open('dict.dat', 'w') as f:
    for item in gp_list:
        f.write('%s\n' % item)

# Read back file to check the stock was added correctly
with open('dict.dat') as f:
    gp_list = f.readlines()
gp_list = map(lambda s: s.strip(), gp_list)
gp_list = list(set(gp_list))
print gp_list
print ''
secondfoundation/Second-Foundation-Src
src/haruspex/python/echelon/investopedia_generator.py
Python
lgpl-2.1
2,630
0.031179
# -*- coding: utf-8 -*-
from django.utils.encoding import smart_str


class Menu(object):
    namespace = None

    def __init__(self, renderer):
        self.renderer = renderer
        if not self.namespace:
            self.namespace = self.__class__.__name__

    def get_nodes(self, request):
        """
        should return a list of NavigationNode instances
        """
        raise NotImplementedError


class Modifier(object):
    def __init__(self, renderer):
        self.renderer = renderer

    def modify(self, request, nodes, namespace, root_id, post_cut, breadcrumb):
        pass


class NavigationNode(object):
    def __init__(self, title, url, id, parent_id=None, parent_namespace=None,
                 attr=None, visible=True):
        self.children = []  # do not touch
        self.parent = None  # do not touch, code depends on this
        self.namespace = None  # TODO: Assert why we need this and above
        self.title = title
        self.url = url
        self.id = id
        self.parent_id = parent_id
        self.parent_namespace = parent_namespace
        self.visible = visible
        self.attr = attr or {}  # To avoid declaring a dict in defaults...

    def __repr__(self):
        return "<Navigation Node: %s>" % smart_str(self.title)

    def get_menu_title(self):
        return self.title

    def get_absolute_url(self):
        return self.url

    def get_attribute(self, name):
        return self.attr.get(name, None)

    def get_descendants(self):
        return sum(([node] + node.get_descendants() for node in self.children), [])

    def get_ancestors(self):
        if getattr(self, 'parent', None):
            return [self.parent] + self.parent.get_ancestors()
        else:
            return []
nimbis/django-cms
menus/base.py
Python
bsd-3-clause
1,763
0.001134
#
# Copyright (c) 2014 Piston Cloud Computing, Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
import unittest


class TestSequenceFunctions(unittest.TestCase):

    def setUp(self):
        pass

    def test_nothing(self):
        # make sure the shuffled sequence does not lose any elements
        pass


if __name__ == '__main__':
    unittest.main()
dlenwell/refstack-client
tests/unit/tests.py
Python
apache-2.0
909
0.0011
""" This contains all the constants needed for the daemons to run """ LOGGING_CONSTANTS = { 'LOGFILE' : 'summer.log', 'MAX_LOG_SIZE' : 1048576, # 1 MEG 'BACKUP_COUNT' : 5 } def getLoggingConstants(constant): """ Returns various constants needing by the logging module """ return LOGGING_CONSTANTS.get(constant, False)
DIVERSIFY-project/SMART-GH
sensor_processing/constants.py
Python
apache-2.0
359
0.013928
##############################################################################
# Copyright (c) 2013-2018, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/llnl/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
#
from spack import *


class PyPsyclone(PythonPackage):
    """Code generation for the PSyKAl framework from the GungHo project,
    as used by the LFRic model at the UK Met Office."""

    homepage = "https://github.com/stfc/PSyclone"
    url = "https://github.com/stfc/PSyclone/archive/1.5.1.tar.gz"
    giturl = "https://github.com/stfc/PSyclone.git"

    version('1.5.1', git=giturl,
            commit='eba7a097175b02f75dec70616cf267b7b3170d78')
    version('develop', git=giturl, branch='master')

    depends_on('py-setuptools', type='build')
    depends_on('py-pyparsing', type=('build', 'run'))

    # Test cases fail without compatible versions of py-fparser:
    depends_on('py-fparser@0.0.5', type=('build', 'run'), when='@1.5.1')
    depends_on('py-fparser', type=('build', 'run'), when='@1.5.2:')

    # Dependencies only required for tests:
    depends_on('py-numpy', type='test')
    depends_on('py-nose', type='test')
    depends_on('py-pytest', type='test')

    @run_after('install')
    @on_package_attributes(run_tests=True)
    def check_build(self):
        # Limit py.test to search inside the build tree:
        touch('pytest.ini')
        with working_dir('src'):
            Executable('py.test')()

    def setup_environment(self, spack_env, run_env):
        # Allow testing with installed executables:
        spack_env.prepend_path('PATH', self.prefix.bin)
tmerrick1/spack
var/spack/repos/builtin/packages/py-psyclone/package.py
Python
lgpl-2.1
2,637
0.000758
import praw
import requests

from env import env
from twitch import twitch


class reddit:
    def __init__(self):
        self.r = praw.Reddit(user_agent='Heroes of the Storm Sidebar by /u/Hermes13')
        self.env = env()
        self.access_information = None

    def setup(self):
        # self.r.set_oauth_app_info( client_id=self.env.redditClientID,
        #                            client_secret=self.env.redditSecretID,
        #                            redirect_uri=self.env.redditRedirectURI)
        # url = self.r.get_authorize_url('uniqueKey', 'identity modconfig modcontributors wikiread', True)
        # import webbrowser
        # webbrowser.open(url)
        pass

    def connect(self):
        self.r.set_oauth_app_info(client_id=self.env.redditClientID,
                                  client_secret=self.env.redditSecretID,
                                  redirect_uri=self.env.redditRedirectURI)
        # self.access_information = self.r.get_access_information(self.env.redditAuthCode)
        # print self.access_information
        # self.r.set_access_credentials(**self.access_information)
        self.r.refresh_access_information(self.env.redditRefreshToken)
        authenticated_user = self.r.get_me()

    def updateSidebar(self, matches, streams, freeRotation):
        sidebar = self.r.get_wiki_page('heroesofthestorm', 'sidebar')
        sidebarWiki = sidebar.content_md

        if matches:
            sidebarWiki = sidebarWiki.replace("%%EVENTS%%", matches)
        if streams:
            sidebarWiki = sidebarWiki.replace("%%STREAMS%%", streams)
        if freeRotation:
            sidebarWiki = sidebarWiki.replace("%%FREEROTATION%%", freeRotation)

        self.r.update_settings(self.r.get_subreddit('heroesofthestorm'),
                               description=sidebarWiki)

        return sidebarWiki.encode('ascii', 'ignore')
rhots/automation
heroes-sidebar-master/reddit.py
Python
isc
1,603
0.029944
# -*- coding: utf-8 -*-
"""
Created on Mon Jun 22 15:32:58 2015

@author: hanbre
"""
from __future__ import print_function

import sys
import numpy as np
import pandas as pd
import xray
import datetime
import netCDF4
from mpl_toolkits.basemap import Basemap
import matplotlib
from matplotlib.pylab import *
import matplotlib.colors as colors
from mpl_toolkits.axes_grid1 import make_axes_locatable
from matplotlib.colors import Normalize
import seaborn as sns
from IPython import embed


class MidpointNormalize(Normalize):
    def __init__(self, vmin=None, vmax=None, midpoint=None, clip=False):
        self.midpoint = midpoint
        Normalize.__init__(self, vmin, vmax, clip)

    def __call__(self, value, clip=None):
        # I'm ignoring masked values and all kinds of edge cases to make a
        # simple example...
        x, y = [self.vmin, self.midpoint, self.vmax], [0, 0.5, 1]
        return np.ma.masked_array(np.interp(value, x, y))


def read_data(id_in):
    data = xray.open_dataset(id_in)
    return data


def plotter(vm, x, y):
    # fig=figure()
    print('plotter')
    xx, yy = np.meshgrid(x, y)
    if shape(xx) != shape(vm):
        vm = vm.transpose()
    gases = ['O3', 'HCL', 'CL', 'CLY', '']
    if var in gases:
        CF = contourf(x, y, vm, linspace(np.amin(vm.values), np.amax(vm.values), 10),
                      cmap=matplotlib.cm.jet)
        CS = contour(x, y, vm, linspace(np.amin(vm.values), np.amax(vm.values), 10),
                     colors='k')
    elif var == 'T':
        CF = contourf(x, y, vm, linspace(np.amin(vm.values), 400, 10),
                      cmap=matplotlib.cm.jet)
        CS = contour(x, y, vm, linspace(np.amin(vm.values), 400, 10), colors='k')
    else:
        norm = MidpointNormalize(midpoint=0)
        CF = contourf(x, y, vm, np.linspace(np.amin(vm.values), np.amax(vm.values), 1000),
                      norm=norm, cmap='seismic')
        CS = contour(x, y, vm, 10, colors='k')
    xlabel(x.units)
    ylabel(y.units)
    clb = colorbar(CF)
    clb.set_label('(' + v.units + ')')
    # title=('{0} at {1}={2} and {3}={4}'.format(var,getattr(v,pvar1)[p1],getattr(v,pvar1)[p1].values,getattr(v,pvar2)[p2],getattr(v,pvar2)[p2].values))
    # close(fig)
    return


def meaner(v, mvars):
    vm = v.mean(dim=mvars)
    return vm


def pointextr(v, pvar1, p1, pvar2, p2, pvars):
    vm = v[pvars]
    return vm


if __name__ == '__main__':
    i = 0
    # case_id = id_in.split('/')
    with open(sys.argv[1], 'r') as file_in:
        header = next(file_in)
        for line in file_in:
            i += 1
            l = line.strip('\n').split(' ')
            id_in = l[0]
            ds = read_data(id_in)
            typ = l[1]
            print(typ)
            var = l[2]
            xvar = l[3]
            yvar = l[4]
            v = getattr(ds, var)
            x = getattr(ds, xvar)
            y = getattr(ds, yvar)
            if typ == 'm':
                print('here')
                mvar1 = l[5]
                mvar2 = l[6]
                if size(v.dims) == 4:
                    mvars = [mvar1, mvar2]
                else:
                    mvars = [mvar1]
                vm = meaner(v, mvars)
                savestring = '{0}{1}{2}{3}{4}{5}{6}.png'.format(
                    id_in, typ, var, xvar, yvar, mvar1, mvar2)
                print(savestring)
            elif typ == 'p':
                print('there')
                pvar1 = l[5]
                p1 = int(l[7])
                pvar2 = l[6]
                p2 = int(l[8])
                pvars = {pvar1: p1, pvar2: p2}
                vm = pointextr(v, pvar1, p1, pvar2, p2, pvars)
                savestring = '{0}{1}{2}{3}{4}{5}{6}{7}{8}.png'.format(
                    id_in, typ, var, xvar, yvar, pvar1, pvar2, p1, p2)
                print(savestring)
            xis = axes([0.09, 0.1, 0.85, 0.82], axisbg='white')
            fig = figure(num=i, figsize=(10., 5.), dpi=None, facecolor='w', edgecolor='k')
            plotter(vm, x, y)
            if yvar == 'lev':
                print('log=True')
                xis.set_yscale("log")
            savefig(savestring, dpi=100, facecolor='w', edgecolor='w',
                    orientation='portrait')
            print('again')
            close(fig)
            del(ds)
hansbrenna/NetCDF_postprocessor
plotter3.py
Python
gpl-3.0
4,021
0.034071
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================

"""Example / benchmark for building a PTB LSTM model.

Trains the model described in:
(Zaremba, et. al.) Recurrent Neural Network Regularization
http://arxiv.org/abs/1409.2329

There are 3 supported model configurations:
===========================================
| config | epochs | train | valid  | test
===========================================
| small  | 13     | 37.99 | 121.39 | 115.91
| medium | 39     | 48.45 |  86.16 |  82.07
| large  | 55     | 37.87 |  82.62 |  78.29
The exact results may vary depending on the random initialization.

The hyperparameters used in the model:
- init_scale - the initial scale of the weights
- learning_rate - the initial value of the learning rate
- max_grad_norm - the maximum permissible norm of the gradient
- num_layers - the number of LSTM layers
- num_steps - the number of unrolled steps of LSTM
- hidden_size - the number of LSTM units
- max_epoch - the number of epochs trained with the initial learning rate
- max_max_epoch - the total number of epochs for training
- keep_prob - the probability of keeping weights in the dropout layer
- lr_decay - the decay of the learning rate for each epoch after "max_epoch"
- batch_size - the batch size

The data required for this example is in the data/ dir of the
PTB dataset from Tomas Mikolov's webpage:

$ wget http://www.fit.vutbr.cz/~imikolov/rnnlm/simple-examples.tgz
$ tar xvf simple-examples.tgz

To run:

$ python ptb_word_lm.py --data_path=simple-examples/data/

"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

import time

import numpy as np
import tensorflow as tf

from tensorflow.models.rnn.ptb import reader

flags = tf.flags
logging = tf.logging

flags.DEFINE_string(
    "model", "small",
    "A type of model. Possible options are: small, medium, large.")
flags.DEFINE_string("data_path", None, "data_path")
flags.DEFINE_bool("use_fp16", False,
                  "Train using 16-bit floats instead of 32bit floats")

FLAGS = flags.FLAGS


def data_type():
  return tf.float16 if FLAGS.use_fp16 else tf.float32


class PTBModel(object):
  """The PTB model."""

  def __init__(self, is_training, config):
    self.batch_size = batch_size = config.batch_size
    self.num_steps = num_steps = config.num_steps
    size = config.hidden_size
    vocab_size = config.vocab_size

    self._input_data = tf.placeholder(tf.int32, [batch_size, num_steps])
    self._targets = tf.placeholder(tf.int32, [batch_size, num_steps])

    # Slightly better results can be obtained with forget gate biases
    # initialized to 1 but the hyperparameters of the model would need to be
    # different than reported in the paper.
    lstm_cell = tf.nn.rnn_cell.BasicLSTMCell(size, forget_bias=0.0,
                                             state_is_tuple=True)
    if is_training and config.keep_prob < 1:
      lstm_cell = tf.nn.rnn_cell.DropoutWrapper(
          lstm_cell, output_keep_prob=config.keep_prob)
    cell = tf.nn.rnn_cell.MultiRNNCell([lstm_cell] * config.num_layers,
                                       state_is_tuple=True)

    self._initial_state = cell.zero_state(batch_size, data_type())

    with tf.device("/cpu:0"):
      embedding = tf.get_variable(
          "embedding", [vocab_size, size], dtype=data_type())
      inputs = tf.nn.embedding_lookup(embedding, self._input_data)

    if is_training and config.keep_prob < 1:
      inputs = tf.nn.dropout(inputs, config.keep_prob)

    # Simplified version of tensorflow.models.rnn.rnn.py's rnn().
    # This builds an unrolled LSTM for tutorial purposes only.
    # In general, use the rnn() or state_saving_rnn() from rnn.py.
    #
    # The alternative version of the code below is:
    #
    # inputs = [tf.squeeze(input_, [1])
    #           for input_ in tf.split(1, num_steps, inputs)]
    # outputs, state = tf.nn.rnn(cell, inputs, initial_state=self._initial_state)
    outputs = []
    state = self._initial_state
    with tf.variable_scope("RNN"):
      for time_step in range(num_steps):
        if time_step > 0: tf.get_variable_scope().reuse_variables()
        (cell_output, state) = cell(inputs[:, time_step, :], state)
        outputs.append(cell_output)

    output = tf.reshape(tf.concat(1, outputs), [-1, size])
    softmax_w = tf.get_variable(
        "softmax_w", [size, vocab_size], dtype=data_type())
    softmax_b = tf.get_variable("softmax_b", [vocab_size], dtype=data_type())
    logits = tf.matmul(output, softmax_w) + softmax_b
    loss = tf.nn.seq2seq.sequence_loss_by_example(
        [logits],
        [tf.reshape(self._targets, [-1])],
        [tf.ones([batch_size * num_steps], dtype=data_type())])
    self._cost = cost = tf.reduce_sum(loss) / batch_size
    self._final_state = state

    if not is_training:
      return

    self._lr = tf.Variable(0.0, trainable=False)
    tvars = tf.trainable_variables()
    grads, _ = tf.clip_by_global_norm(tf.gradients(cost, tvars),
                                      config.max_grad_norm)
    optimizer = tf.train.GradientDescentOptimizer(self._lr)
    self._train_op = optimizer.apply_gradients(zip(grads, tvars))

    self._new_lr = tf.placeholder(
        tf.float32, shape=[], name="new_learning_rate")
    self._lr_update = tf.assign(self._lr, self._new_lr)

  def assign_lr(self, session, lr_value):
    session.run(self._lr_update, feed_dict={self._new_lr: lr_value})

  @property
  def input_data(self):
    return self._input_data

  @property
  def targets(self):
    return self._targets

  @property
  def initial_state(self):
    return self._initial_state

  @property
  def cost(self):
    return self._cost

  @property
  def final_state(self):
    return self._final_state

  @property
  def lr(self):
    return self._lr

  @property
  def train_op(self):
    return self._train_op


class SmallConfig(object):
  """Small config."""
  init_scale = 0.1
  learning_rate = 1.0
  max_grad_norm = 5
  num_layers = 2
  num_steps = 20
  hidden_size = 200
  max_epoch = 4
  max_max_epoch = 13
  keep_prob = 1.0
  lr_decay = 0.5
  batch_size = 20
  vocab_size = 10000


class MediumConfig(object):
  """Medium config."""
  init_scale = 0.05
  learning_rate = 1.0
  max_grad_norm = 5
  num_layers = 2
  num_steps = 35
  hidden_size = 650
  max_epoch = 6
  max_max_epoch = 39
  keep_prob = 0.5
  lr_decay = 0.8
  batch_size = 20
  vocab_size = 10000


class LargeConfig(object):
  """Large config."""
  init_scale = 0.04
  learning_rate = 1.0
  max_grad_norm = 10
  num_layers = 2
  num_steps = 35
  hidden_size = 1500
  max_epoch = 14
  max_max_epoch = 55
  keep_prob = 0.35
  lr_decay = 1 / 1.15
  batch_size = 20
  vocab_size = 10000


class TestConfig(object):
  """Tiny config, for testing."""
  init_scale = 0.1
  learning_rate = 1.0
  max_grad_norm = 1
  num_layers = 1
  num_steps = 2
  hidden_size = 2
  max_epoch = 1
  max_max_epoch = 1
  keep_prob = 1.0
  lr_decay = 0.5
  batch_size = 20
  vocab_size = 10000


def run_epoch(session, model, data, eval_op, verbose=False):
  """Runs the model on the given data."""
  epoch_size = ((len(data) // model.batch_size) - 1) // model.num_steps
  start_time = time.time()
  costs = 0.0
  iters = 0
  state = session.run(model.initial_state)
  for step, (x, y) in enumerate(reader.ptb_iterator(data, model.batch_size,
                                                    model.num_steps)):
    fetches = [model.cost, model.final_state, eval_op]
    feed_dict = {}
    feed_dict[model.input_data] = x
    feed_dict[model.targets] = y
    for i, (c, h) in enumerate(model.initial_state):
      feed_dict[c] = state[i].c
      feed_dict[h] = state[i].h
    cost, state, _ = session.run(fetches, feed_dict)
    costs += cost
    iters += model.num_steps

    if verbose and step % (epoch_size // 10) == 10:
      print("%.3f perplexity: %.3f speed: %.0f wps" %
            (step * 1.0 / epoch_size, np.exp(costs / iters),
             iters * model.batch_size / (time.time() - start_time)))

  return np.exp(costs / iters)


def get_config():
  if FLAGS.model == "small":
    return SmallConfig()
  elif FLAGS.model == "medium":
    return MediumConfig()
  elif FLAGS.model == "large":
    return LargeConfig()
  elif FLAGS.model == "test":
    return TestConfig()
  else:
    raise ValueError("Invalid model: %s", FLAGS.model)


def main(_):
  if not FLAGS.data_path:
    raise ValueError("Must set --data_path to PTB data directory")

  raw_data = reader.ptb_raw_data(FLAGS.data_path)
  train_data, valid_data, test_data, _ = raw_data

  config = get_config()
  eval_config = get_config()
  eval_config.batch_size = 1
  eval_config.num_steps = 1

  with tf.Graph().as_default(), tf.Session() as session:
    initializer = tf.random_uniform_initializer(-config.init_scale,
                                                config.init_scale)
    with tf.variable_scope("model", reuse=None, initializer=initializer):
      m = PTBModel(is_training=True, config=config)
    with tf.variable_scope("model", reuse=True, initializer=initializer):
      mvalid = PTBModel(is_training=False, config=config)
      mtest = PTBModel(is_training=False, config=eval_config)

    tf.initialize_all_variables().run()

    for i in range(config.max_max_epoch):
      lr_decay = config.lr_decay ** max(i - config.max_epoch, 0.0)
      m.assign_lr(session, config.learning_rate * lr_decay)

      print("Epoch: %d Learning rate: %.3f" % (i + 1, session.run(m.lr)))
      train_perplexity = run_epoch(session, m, train_data, m.train_op,
                                   verbose=True)
      print("Epoch: %d Train Perplexity: %.3f" % (i + 1, train_perplexity))
      valid_perplexity = run_epoch(session, mvalid, valid_data, tf.no_op())
      print("Epoch: %d Valid Perplexity: %.3f" % (i + 1, valid_perplexity))

    test_perplexity = run_epoch(session, mtest, test_data, tf.no_op())
    print("Test Perplexity: %.3f" % test_perplexity)


if __name__ == "__main__":
  tf.app.run()
naturali/tensorflow
tensorflow/models/rnn/ptb/ptb_word_lm.py
Python
apache-2.0
10,545
0.010906
import logging
import logging.config
import sys
import threading
import os

from amberclient.collision_avoidance.collision_avoidance_proxy import CollisionAvoidanceProxy
from amberclient.common.amber_client import AmberClient
from amberclient.location.location import LocationProxy
from amberclient.roboclaw.roboclaw import RoboclawProxy

from amberdriver.common.message_handler import MessageHandler
from amberdriver.drive_to_point import drive_to_point_pb2
from amberdriver.drive_to_point.drive_to_point import DriveToPoint
from amberdriver.tools import config

__author__ = 'paoolo'

pwd = os.path.dirname(os.path.abspath(__file__))
logging.config.fileConfig('%s/drive_to_point.ini' % pwd)
config.add_config_ini('%s/drive_to_point.ini' % pwd)

LOGGER_NAME = 'DriveToPointController'
USE_COLLISION_AVOIDANCE = config.DRIVE_TO_POINT_USE_COLLISION_AVOIDANCE == 'True'


class DriveToPointController(MessageHandler):
    def __init__(self, pipe_in, pipe_out, driver):
        MessageHandler.__init__(self, pipe_in, pipe_out)
        self.__drive_to_point = driver
        self.__logger = logging.getLogger(LOGGER_NAME)

    def handle_data_message(self, header, message):
        if message.HasExtension(drive_to_point_pb2.setTargets):
            self.__handle_set_targets(header, message)
        elif message.HasExtension(drive_to_point_pb2.getNextTarget):
            self.__handle_get_next_target(header, message)
        elif message.HasExtension(drive_to_point_pb2.getNextTargets):
            self.__handle_get_next_targets(header, message)
        elif message.HasExtension(drive_to_point_pb2.getVisitedTarget):
            self.__handle_get_visited_target(header, message)
        elif message.HasExtension(drive_to_point_pb2.getVisitedTargets):
            self.__handle_get_visited_targets(header, message)
        elif message.HasExtension(drive_to_point_pb2.getConfiguration):
            self.__handle_get_configuration(header, message)
        else:
            self.__logger.warning('No request in message')

    def __handle_set_targets(self, header, message):
        self.__logger.debug('Set targets')
        targets = message.Extensions[drive_to_point_pb2.targets]
        targets = zip(targets.longitudes, targets.latitudes, targets.radiuses)
        self.__drive_to_point.set_targets(targets)

    @MessageHandler.handle_and_response
    def __handle_get_next_target(self, received_header, received_message,
                                 response_header, response_message):
        self.__logger.debug('Get next target')
        next_target, current_location = self.__drive_to_point.get_next_target_and_location()

        targets = response_message.Extensions[drive_to_point_pb2.targets]
        targets.longitudes.extend([next_target[0]])
        targets.latitudes.extend([next_target[1]])
        targets.radiuses.extend([next_target[2]])

        location = response_message.Extensions[drive_to_point_pb2.location]
        location.x, location.y, location.p, location.alfa, location.timeStamp = current_location

        response_message.Extensions[drive_to_point_pb2.getNextTarget] = True

        return response_header, response_message

    @MessageHandler.handle_and_response
    def __handle_get_next_targets(self, received_header, received_message,
                                  response_header, response_message):
        self.__logger.debug('Get next targets')
        next_targets, current_location = self.__drive_to_point.get_next_targets_and_location()

        targets = response_message.Extensions[drive_to_point_pb2.targets]
        targets.longitudes.extend(map(lambda next_target: next_target[0], next_targets))
        targets.latitudes.extend(map(lambda next_target: next_target[1], next_targets))
        targets.radiuses.extend(map(lambda next_target: next_target[2], next_targets))

        location = response_message.Extensions[drive_to_point_pb2.location]
        location.x, location.y, location.p, location.alfa, location.timeStamp = current_location

        response_message.Extensions[drive_to_point_pb2.getNextTargets] = True

        return response_header, response_message

    @MessageHandler.handle_and_response
    def __handle_get_visited_target(self, received_header, received_message,
                                    response_header, response_message):
        self.__logger.debug('Get visited target')
        visited_target, current_location = self.__drive_to_point.get_visited_target_and_location()

        targets = response_message.Extensions[drive_to_point_pb2.targets]
        targets.longitudes.extend([visited_target[0]])
        targets.latitudes.extend([visited_target[1]])
        targets.radiuses.extend([visited_target[2]])

        location = response_message.Extensions[drive_to_point_pb2.location]
        location.x, location.y, location.p, location.alfa, location.timeStamp = current_location

        response_message.Extensions[drive_to_point_pb2.getVisitedTarget] = True

        return response_header, response_message

    @MessageHandler.handle_and_response
    def __handle_get_visited_targets(self, received_header, received_message,
                                     response_header, response_message):
        self.__logger.debug('Get visited targets')
        visited_targets, current_location = self.__drive_to_point.get_visited_targets_and_location()

        targets = response_message.Extensions[drive_to_point_pb2.targets]
        targets.longitudes.extend(map(lambda target: target[0], visited_targets))
        targets.latitudes.extend(map(lambda target: target[1], visited_targets))
        targets.radiuses.extend(map(lambda target: target[2], visited_targets))

        location = response_message.Extensions[drive_to_point_pb2.location]
        location.x, location.y, location.p, location.alfa, location.timeStamp = current_location

        response_message.Extensions[drive_to_point_pb2.getVisitedTargets] = True

        return response_header, response_message

    @MessageHandler.handle_and_response
    def __handle_get_configuration(self, received_header, received_message,
                                   response_header, response_message):
        self.__logger.debug('Get configuration')
        configuration = response_message.Extensions[drive_to_point_pb2.configuration]
        configuration.maxSpeed = self.__drive_to_point.MAX_SPEED

        response_message.Extensions[drive_to_point_pb2.getConfiguration] = True

        return response_header, response_message

    def handle_subscribe_message(self, header, message):
        self.__logger.debug('Subscribe action, nothing to do...')

    def handle_unsubscribe_message(self, header, message):
        self.__logger.debug('Unsubscribe action, nothing to do...')

    def handle_client_died_message(self, client_id):
        self.__logger.info('Client %d died, stop!', client_id)
        self.__drive_to_point.set_targets([])


if __name__ == '__main__':
    client_for_location = AmberClient('127.0.0.1', name="location")
    client_for_driver = AmberClient('127.0.0.1', name="driver")

    location_proxy = LocationProxy(client_for_location, 0)
    if USE_COLLISION_AVOIDANCE:
        driver_proxy = CollisionAvoidanceProxy(client_for_driver, 0)
    else:
        driver_proxy = RoboclawProxy(client_for_driver, 0)

    drive_to_point = DriveToPoint(driver_proxy, location_proxy)

    driving_thread = threading.Thread(target=drive_to_point.driving_loop, name="driving-thread")
    driving_thread.start()

    location_thread = threading.Thread(target=drive_to_point.location_loop, name="location-thread")
    location_thread.start()

    controller = DriveToPointController(sys.stdin, sys.stdout, drive_to_point)
    controller()
showmen15/testEEE
src/amberdriver/drive_to_point/drive_to_point_controller.py
Python
mit
7,564
0.003173
"""Models for Zeroconf.""" import asyncio from typing import Any from zeroconf import DNSPointer, DNSRecord, ServiceBrowser, Zeroconf from zeroconf.asyncio import AsyncZeroconf class HaZeroconf(Zeroconf): """Zeroconf that cannot be closed.""" def close(self) -> None: """Fake method to avoid integrations closing it.""" ha_close = Zeroconf.close class HaAsyncZeroconf(AsyncZeroconf): """Home Assistant version of AsyncZeroconf.""" def __init__( # pylint: disable=super-init-not-called self, *args: Any, **kwargs: Any ) -> None: """Wrap AsyncZeroconf.""" self.zeroconf = HaZeroconf(*args, **kwargs) self.loop = asyncio.get_running_loop() async def async_close(self) -> None: """Fake method to avoid integrations closing it.""" class HaServiceBrowser(ServiceBrowser): """ServiceBrowser that only consumes DNSPointer records.""" def update_record(self, zc: Zeroconf, now: float, record: DNSRecord) -> None: """Pre-Filter update_record to DNSPointers for the configured type.""" # # Each ServerBrowser currently runs in its own thread which # processes every A or AAAA record update per instance. # # As the list of zeroconf names we watch for grows, each additional # ServiceBrowser would process all the A and AAAA updates on the network. # # To avoid overwhemling the system we pre-filter here and only process # DNSPointers for the configured record name (type) # if record.name not in self.types or not isinstance(record, DNSPointer): return super().update_record(zc, now, record)
kennedyshead/home-assistant
homeassistant/components/zeroconf/models.py
Python
apache-2.0
1,697
0.001179
issues = [
    dict(name='Habit', number=5, season='Winter 2012',
         description='commit to a change, experience it, and record'),
    dict(name='Interview', number=4, season='Autumn 2011',
         description="this is your opportunity to inhabit another's mind"),
    dict(name='Digital Presence', number=3, season='Summer 2011',
         description='what does your digital self look like?'),
    dict(name='Adventure', number=2, season='Spring 2011',
         description='take an adventure and write about it.'),
    dict(name='Unplugging', number=1, season='Winter 2011',
         description='what are you looking forward to leaving?')
]

siteroot = '/Users/adam/open review quarterly/source/'
infodir = '/Users/adam/open review quarterly/info'

skip_issues_before = 5

illustration_tag = '=== Illustration ==='
illustration_tag_sized = "=== Illustration width: 50% ==="
adamgreenhall/openreviewquarterly
builder/config.py
Python
mit
879
0.025028
"""Spyse OODA behaviour module""" import time # http://www.mindsim.com/MindSim/Corporate/OODA.html # http://www.d-n-i.net/second_level/boyd_military.htm # http://www.belisarius.com/modern_business_strategy/boyd/essence/eowl_frameset.htm # http://www.valuebasedmanagement.net/methods_boyd_ooda_loop.html # http://www.fastcompany.com/magazine/59/pilot.html # # The OODA loop (Observe, Orient, Decide, and Act) is an # information strategy concept for information warfare # developed by Colonel John Boyd (1927-1997). Although the # OODA model was clearly created for military purposes, # elements of the same theory can also be applied to business # strategy. Boyd developed the theory based on his earlier # experience as a fighter pilot and work on energy maneuverability. # He initially used it to explain victory in air-to-air combat, # but in the last years of his career he expanded his OODA loop # theory into a grand strategy that would defeat an enemy # strategically by pychological paralysis. from spyse.core.behaviours.fsm import FSMBehaviour class Observation(object): pass class Orientation(object): pass class Decision(object): pass class Action(object): pass class OODABehaviour(FSMBehaviour): pass
davidko/evolspyse
core/behaviours/ooda.py
Python
lgpl-2.1
1,291
0.000775
# -*- coding: utf-8 -*-

from odoo import fields, models


class CustomSurvey(models.Model):
    _inherit = 'survey.survey'

    auth_required = fields.Boolean(
        'Login required',
        help="Users with a public link will be requested to login before taking part to the survey",
        oldname="authenticate", default=True)
    users_can_go_back = fields.Boolean(
        'Users can go back',
        help="If checked, users can go back to previous pages.",
        default=True)
hgsoft/hgsoft-addons
custom_survey_multi_emails_and_portal/models/custom_survey.py
Python
gpl-3.0
454
0.015419
# -*- coding: utf-8 -*-
#
# pybgpstream documentation build configuration file, created by
# sphinx-quickstart on Mon Jan 19 11:07:23 2015.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.

import sys
import os
import sphinx_bootstrap_theme

# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.insert(0, os.path.abspath('../src'))

# -- General configuration ------------------------------------------------

# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'

# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
    'sphinx.ext.autodoc',
    'sphinx.ext.doctest',
    'sphinx.ext.todo',
    'sphinx.ext.viewcode',
]

# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']

# The suffix of source filenames.
source_suffix = '.rst'

# The encoding of source files.
#source_encoding = 'utf-8-sig'

# The master toctree document.
master_doc = 'index'

# General information about the project.
project = u'pybgpstream'
copyright = u'2015, The Regents of the University of California'

# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '1.0'
# The full version, including alpha/beta/rc tags.
release = '1.0'

# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None

# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'

# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']

# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None

# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True

# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True

# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False

# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'

# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []

# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False

todo_include_todos = True

# -- Options for HTML output ----------------------------------------------

# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'bootstrap'

# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}

# Add any paths that contain custom themes here, relative to this directory.
html_theme_path = sphinx_bootstrap_theme.get_html_theme_path()

# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None

# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None

# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None

# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None

# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']

# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#html_extra_path = []

# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'

# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True

# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}

# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}

# If false, no module index is generated.
#html_domain_indices = True

# If false, no index is generated.
#html_use_index = True

# If true, the index is split into individual pages for each letter.
#html_split_index = False

# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True

# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True

# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True

# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''

# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None

# Output file base name for HTML help builder.
htmlhelp_basename = 'pybgpstreamdoc'

# -- Options for LaTeX output ---------------------------------------------

latex_elements = {
    # The paper size ('letterpaper' or 'a4paper').
    #'papersize': 'letterpaper',

    # The font size ('10pt', '11pt' or '12pt').
    #'pointsize': '10pt',

    # Additional stuff for the LaTeX preamble.
    #'preamble': '',
}

# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
#  author, documentclass [howto, manual, or own class]).
latex_documents = [
    ('index', 'pybgpstream.tex', u'pybgpstream Documentation',
     u'Alistair King', 'manual'),
]

# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None

# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False

# If true, show page references after internal links.
#latex_show_pagerefs = False

# If true, show URL addresses after external links.
#latex_show_urls = False

# Documents to append as an appendix to all manuals.
#latex_appendices = []

# If false, no module index is generated.
#latex_domain_indices = True

# -- Options for manual page output ---------------------------------------

# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
    ('index', 'pybgpstream', u'pybgpstream Documentation',
     [u'Alistair King'], 1)
]

# If true, show URL addresses after external links.
#man_show_urls = False

# -- Options for Texinfo output -------------------------------------------

# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
#  dir menu entry, description, category)
texinfo_documents = [
    ('index', 'pybgpstream', u'pybgpstream Documentation',
     u'Alistair King', 'pybgpstream', 'One line description of project.',
     'Miscellaneous'),
]

# Documents to append as an appendix to all manuals.
#texinfo_appendices = []

# If false, no module index is generated.
#texinfo_domain_indices = True

# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'

# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
CAIDA/bgpstream
pybgpstream/docs/conf.py
Python
gpl-2.0
8,420
0.005819
import unittest


class ATest(unittest.TestCase):

    def setUp(self):
        print("setup")
        pass

    def test_a(self):
        self.assertTrue(True)

    def tearDown(self):
        print("tear down")


if __name__ == "__main__":
    print("masher_test.py")
    unittest.main()
bonattt/name-masher
tests/masher_test.py
Python
mit
289
0.00346
# Copyright (c) 2010, 2011 Timothy Lovorn
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.

import os

from FileDict import FileDict
from ControllerQueue import ControllerQueue
from Grapher import Grapher

DEFAULT_MAX_PROCESSES = 2


class RunInterface(object):
    def __init__(self, path):
        self.path = path

    def doRun(self, configFiles, maxProcesses=DEFAULT_MAX_PROCESSES):
        """Run a controller for each config in configFiles."""
        queue = ControllerQueue(configFiles, maxProcesses)
        queue.runAll()

    def graphOneSeries(self, configFiles, xVar, yVar, outputName):
        grapher = Grapher()
        fig, axes = grapher.simple2D(configFiles, "config", xVar, "state", yVar)
        grapher.setAxisLabels(axes, xVar, yVar)
        figurePath = os.path.join(self.path, outputName)
        grapher.saveFigure(fig, figurePath)
        return fig, axes

    def graphSeriesDict(self, seriesDict, seriesLabel, xVar, yVar, outputName,
                        legend_title=None):
        grapher = Grapher()
        fig, axes = grapher.plotSeriesDict(seriesDict, seriesLabel, "config", xVar,
                                           "state", yVar, legend_title=legend_title)
        grapher.setAxisLabels(axes, xVar, yVar)
        figurePath = os.path.join(self.path, outputName)
        grapher.saveFigure(fig, figurePath)
        return fig, axes

    def makeRun(self, baseConfig, runData):
        """Make a new run of config files from the base config and runData.

        runData is a list of tuples which contain a label and a dict.
        Labels are used to name generated configs and their specified output files.
        The dicts are key-value pairs for data to modify in the base config.
        Return a list of the names of config files generated.

        """
        configNames = []
        baseConfigFullPath = os.path.join(self.path, baseConfig)
        for label, labelData in runData:
            newConfig = FileDict(baseConfigFullPath)
            newConfigFullPath = os.path.join(self.path, label + "_config")
            labelData.update({"outputLogName": label + "_out.fd",
                              "errorLogName": label + "_error",
                              "debugLogName": label + "_debug"})
            for key, value in labelData.items():
                newConfig.setGlobal(str(key), str(value))
            configNames.append(newConfigFullPath)
            newConfig.writeToFile(newConfigFullPath)
        return configNames

    def oneDimRun(self, baseConfig, label, varName, minimum, maximum, step):
        """One-dimensional run with varName from [minimum, maximum).

        Returns names of config files.

        """
        index = 0
        varValue = minimum
        runData = []
        while varValue < maximum:
            runData.append((label + "_" + str(index) + "_", {varName: varValue}))
            varValue += step
            index += 1
        return self.makeRun(baseConfig, runData)

    def multiDimRun(self, baseConfig, label, varDataList):
        configs = []
        for index, data in enumerate(varDataList):
            varName, minimum, maximum, step = data
            if len(configs) == 0:
                label += "_%s_" % varName
                configs = self.oneDimRun(baseConfig, label, varName,
                                         minimum, maximum, step)
            else:
                newConfigs = []
                label += "_%s_" % varName
                for index, some_config in enumerate(configs):
                    newConfigs.extend(self.oneDimRun(some_config,
                                                     label + "_" + str(index) + "_",
                                                     varName, minimum, maximum, step))
                configs = newConfigs
        return configs
tflovorn/scSuperSolver
src/RunInterface.py
Python
mit
4,820
0.002905
# Copyright 2019 Virgil Dupras
#
# This software is licensed under the "GPLv3" License as described in the "LICENSE" file,
# which should be included with this package. The terms are also available at
# http://www.gnu.org/licenses/gpl-3.0.html

import datetime
from collections import defaultdict

from core.util import dedupe, first as getfirst
from core.trans import tr

from ..model.date import DateFormat
from .base import GUIObject
from .import_table import ImportTable
from .selectable_list import LinkedSelectableList

DAY = 'day'
MONTH = 'month'
YEAR = 'year'


class SwapType:
    DayMonth = 0
    MonthYear = 1
    DayYear = 2
    DescriptionPayee = 3
    InvertAmount = 4


def last_two_digits(year):
    return year - ((year // 100) * 100)


def swapped_date(date, first, second):
    attrs = {DAY: date.day, MONTH: date.month, YEAR: last_two_digits(date.year)}
    newattrs = {first: attrs[second], second: attrs[first]}
    if YEAR in newattrs:
        newattrs[YEAR] += 2000
    return date.replace(**newattrs)


def swap_format_elements(format, first, second):
    # format is a DateFormat
    swapped = format.copy()
    elems = swapped.elements
    TYPE2CHAR = {DAY: 'd', MONTH: 'M', YEAR: 'y'}
    first_char = TYPE2CHAR[first]
    second_char = TYPE2CHAR[second]
    first_index = [i for i, x in enumerate(elems) if x.startswith(first_char)][0]
    second_index = [i for i, x in enumerate(elems) if x.startswith(second_char)][0]
    elems[first_index], elems[second_index] = elems[second_index], elems[first_index]
    return swapped


class AccountPane:
    def __init__(self, iwin, account, target_account, parsing_date_format):
        self.iwin = iwin
        self.account = account
        self._selected_target = target_account
        self.name = account.name
        entries = iwin.loader.accounts.entries_for_account(account)
        self.count = len(entries)
        self.matches = []  # [[ref, imported]]
        self.parsing_date_format = parsing_date_format
        self.max_day = 31
        self.max_month = 12
        self.max_year = 99  # 2 digits
        self._match_entries()
        self._swap_possibilities = set()
        self._compute_swap_possibilities()

    def _compute_swap_possibilities(self):
        entries = list(self.iwin.loader.accounts.entries_for_account(self.account))
        if not entries:
            return
        self._swap_possibilities = set([(DAY, MONTH), (MONTH, YEAR), (DAY, YEAR)])
        for first, second in self._swap_possibilities.copy():
            for entry in entries:
                try:
                    swapped_date(entry.date, first, second)
                except ValueError:
                    self._swap_possibilities.remove((first, second))
                    break

    def _match_entries(self):
        to_import = list(self.iwin.loader.accounts.entries_for_account(self.account))
        reference2entry = {}
        for entry in (e for e in to_import if e.reference):
            reference2entry[entry.reference] = entry
        self.matches = []
        if self.selected_target is not None:
            entries = self.iwin.document.accounts.entries_for_account(self.selected_target)
            for entry in entries:
                if entry.reference in reference2entry:
                    other = reference2entry[entry.reference]
                    if entry.reconciled:
                        self.iwin.import_table.dont_import.add(other)
                        to_import.remove(other)
                    del reference2entry[entry.reference]
                else:
                    other = None
                if other is not None or not entry.reconciled:
                    self.matches.append([entry, other])
        self.matches += [[None, entry] for entry in to_import]
        self._sort_matches()

    def _sort_matches(self):
        self.matches.sort(key=lambda t: t[0].date if t[0] is not None else t[1].date)

    def bind(self, existing, imported):
        [match1] = [m for m in self.matches if m[0] is existing]
        [match2] = [m for m in self.matches if m[1] is imported]
        assert match1[1] is None
        assert match2[0] is None
        match1[1] = match2[1]
        self.matches.remove(match2)

    def can_swap_date_fields(self, first, second):  # 'day', 'month', 'year'
        return (first, second) in self._swap_possibilities \
            or (second, first) in self._swap_possibilities

    def match_entries_by_date_and_amount(self, threshold):
        delta = datetime.timedelta(days=threshold)
        unmatched = (
            to_import for ref, to_import in self.matches if ref is None)
        unmatched_refs = (
            ref for ref, to_import in self.matches if to_import is None)
        amount2refs = defaultdict(list)
        for entry in unmatched_refs:
            amount2refs[entry.amount].append(entry)
        for entry in unmatched:
            if entry.amount not in amount2refs:
                continue
            potentials = amount2refs[entry.amount]
            for ref in potentials:
                if abs(ref.date - entry.date) <= delta:
                    self.bind(ref, entry)
                    potentials.remove(ref)
        self._sort_matches()

    def unbind(self, existing, imported):
        [match] = [m for m in self.matches if m[0] is existing and m[1] is imported]
        match[1] = None
        self.matches.append([None, imported])
        self._sort_matches()

    @property
    def selected_target(self):
        return self._selected_target

    @selected_target.setter
    def selected_target(self, value):
        self._selected_target = value
        self._match_entries()


# This is a modal window that is designed to be re-instantiated on each import
# run. It is shown modally by the UI as soon as it's created on the UI side.
class ImportWindow(GUIObject):
    # --- View interface
    # close()
    # close_selected_tab()
    # set_swap_button_enabled(enabled: bool)
    # update_selected_pane()
    # show()
    #

    def __init__(self, mainwindow, target_account=None):
        super().__init__()
        if not hasattr(mainwindow, 'loader'):
            raise ValueError("Nothing to import!")
        self.mainwindow = mainwindow
        self.document = mainwindow.document
        self.app = self.document.app
        self._selected_pane_index = 0
        self._selected_target_index = 0

        def setfunc(index):
            self.view.set_swap_button_enabled(self.can_perform_swap())

        self.swap_type_list = LinkedSelectableList(items=[
            "<placeholder> Day <--> Month",
            "<placeholder> Month <--> Year",
            "<placeholder> Day <--> Year",
            tr("Description <--> Payee"),
            tr("Invert Amounts"),
        ], setfunc=setfunc)
        self.swap_type_list.selected_index = SwapType.DayMonth
        self.panes = []
        self.import_table = ImportTable(self)

        self.loader = self.mainwindow.loader
        self.target_accounts = [
            a for a in self.document.accounts if a.is_balance_sheet_account()]
        self.target_accounts.sort(key=lambda a: a.name.lower())
        accounts = []
        for account in self.loader.accounts:
            if account.is_balance_sheet_account():
                entries = self.loader.accounts.entries_for_account(account)
                if len(entries):
                    new_name = self.document.accounts.new_name(account.name)
                    if new_name != account.name:
                        self.loader.accounts.rename_account(account, new_name)
                    accounts.append(account)
        parsing_date_format = DateFormat.from_sysformat(self.loader.parsing_date_format)
        for account in accounts:
            target = target_account
            if target is None and account.reference:
                target = getfirst(
                    t for t in self.target_accounts if t.reference == account.reference
                )
            self.panes.append(
                AccountPane(self, account, target, parsing_date_format))

    # --- Private
    def _can_swap_date_fields(self, first, second):  # 'day', 'month', 'year'
        pane = self.selected_pane
        if pane is None:
            return False
        return pane.can_swap_date_fields(first, second)

    def _invert_amounts(self, apply_to_all):
        if apply_to_all:
            panes = self.panes
        else:
            panes = [self.selected_pane]
        for pane in panes:
            entries = self.loader.accounts.entries_for_account(pane.account)
            txns = dedupe(e.transaction for e in entries)
            for txn in txns:
                for split in txn.splits:
                    split.amount = -split.amount
        self.import_table.refresh()

    def _refresh_target_selection(self):
        if not self.panes:
            return
        target = self.selected_pane.selected_target
        self._selected_target_index = 0
        if target is not None:
            try:
                self._selected_target_index = self.target_accounts.index(target) + 1
            except ValueError:
                pass

    def _refresh_swap_list_items(self):
        if not self.panes:
            return
        items = []
        basefmt = self.selected_pane.parsing_date_format
        for first, second in [(DAY, MONTH), (MONTH, YEAR), (DAY, YEAR)]:
            swapped = swap_format_elements(basefmt, first, second)
            items.append("{} --> {}".format(basefmt.iso_format, swapped.iso_format))
        self.swap_type_list[:3] = items

    def _swap_date_fields(self, first, second, apply_to_all):  # 'day', 'month', 'year'
        assert self._can_swap_date_fields(first, second)
        if apply_to_all:
            panes = [p for p in self.panes if p.can_swap_date_fields(first, second)]
        else:
            panes = [self.selected_pane]

        def switch_func(txn):
            txn.date = swapped_date(txn.date, first, second)

        self._swap_fields(panes, switch_func)
        # Now, let's change the date format on these panes
        for pane in panes:
            basefmt = self.selected_pane.parsing_date_format
            swapped = swap_format_elements(basefmt, first, second)
            pane.parsing_date_format = swapped
            pane._sort_matches()
        self.import_table.refresh()
        self._refresh_swap_list_items()

    def _swap_description_payee(self, apply_to_all):
        if apply_to_all:
            panes = self.panes
        else:
            panes = [self.selected_pane]

        def switch_func(txn):
            txn.description, txn.payee = txn.payee, txn.description

        self._swap_fields(panes, switch_func)

    def _swap_fields(self, panes, switch_func):
        seen = set()
        for pane in panes:
            entries = self.loader.accounts.entries_for_account(pane.account)
            txns = dedupe(e.transaction for e in entries)
            for txn in txns:
                if txn.affected_accounts() & seen:
                    # We've already swapped this txn in a previous pane.
                    continue
                switch_func(txn)
            seen.add(pane.account)
        self.import_table.refresh()

    def _update_selected_pane(self):
        self.import_table.refresh()
        self._refresh_swap_list_items()
        self.view.update_selected_pane()
        self.view.set_swap_button_enabled(self.can_perform_swap())

    # --- Override
    def _view_updated(self):
        if self.document.can_restore_from_prefs():
            self.restore_view()
        # XXX Logically, we should call _update_selected_pane() but doing so
        # makes tests fail. To investigate.
        self._refresh_target_selection()
        self.view.update_selected_pane()
        self._refresh_swap_list_items()
        self.import_table.refresh()

    # --- Public
    def can_perform_swap(self):
        index = self.swap_type_list.selected_index
        if index == SwapType.DayMonth:
            return self._can_swap_date_fields(DAY, MONTH)
        elif index == SwapType.MonthYear:
            return self._can_swap_date_fields(MONTH, YEAR)
        elif index == SwapType.DayYear:
            return self._can_swap_date_fields(DAY, YEAR)
        else:
            return True

    def close_pane(self, index):
        was_selected = index == self.selected_pane_index
        del self.panes[index]
        if not self.panes:
            self.view.close()
            return
        self._selected_pane_index = min(self._selected_pane_index, len(self.panes) - 1)
        if was_selected:
            self._update_selected_pane()

    def import_selected_pane(self):
        pane = self.selected_pane
        matches = pane.matches
        matches = [
            (e, ref) for ref, e in matches
            if e is not None and e not in self.import_table.dont_import]
        if pane.selected_target is not None:
            # We import in an existing account, adjust all the transactions accordingly
            target_account = pane.selected_target
        else:
            target_account = None
        self.document.import_entries(target_account, pane.account, matches)
        self.mainwindow.revalidate()
        self.close_pane(self.selected_pane_index)
        self.view.close_selected_tab()

    def match_entries_by_date_and_amount(self, threshold):
        self.selected_pane.match_entries_by_date_and_amount(threshold)
        self.import_table.refresh()

    def perform_swap(self, apply_to_all=False):
        index = self.swap_type_list.selected_index
        if index == SwapType.DayMonth:
            self._swap_date_fields(DAY, MONTH, apply_to_all=apply_to_all)
        elif index == SwapType.MonthYear:
            self._swap_date_fields(MONTH, YEAR, apply_to_all=apply_to_all)
        elif index == SwapType.DayYear:
            self._swap_date_fields(DAY, YEAR, apply_to_all=apply_to_all)
        elif index == SwapType.DescriptionPayee:
            self._swap_description_payee(apply_to_all=apply_to_all)
        elif index == SwapType.InvertAmount:
            self._invert_amounts(apply_to_all=apply_to_all)

    def restore_view(self):
        self.import_table.columns.restore_columns()

    # --- Properties
    @property
    def selected_pane(self):
        return self.panes[self.selected_pane_index] if self.panes else None

    @property
    def selected_pane_index(self):
        return self._selected_pane_index

    @selected_pane_index.setter
    def selected_pane_index(self, value):
        if value >= len(self.panes):
            return
        self._selected_pane_index = value
        self._refresh_target_selection()
        self._update_selected_pane()

    @property
    def selected_target_account(self):
        return self.selected_pane.selected_target

    @property
    def selected_target_account_index(self):
        return self._selected_target_index

    @selected_target_account_index.setter
    def selected_target_account_index(self, value):
        target = self.target_accounts[value - 1] if value > 0 else None
        self.selected_pane.selected_target = target
        self._selected_target_index = value
        self.import_table.refresh()

    @property
    def target_account_names(self):
        return [tr('< New Account >')] + [a.name for a in self.target_accounts]
hsoft/moneyguru
core/gui/import_window.py
Python
gpl-3.0
15,326
0.002088
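A note on the Day <--> Month swap in the file above: swapped_date() and swap_format_elements() are moneyguru internals defined elsewhere in the repository, so the following is only a minimal standalone sketch of the idea (the name swap_day_month is made up for illustration):

from datetime import date

def swap_day_month(d):
    # date(2008, 3, 7) (March 7th) becomes date(2008, 7, 3) (July 3rd).
    # Raises ValueError when the day cannot be a month, which is why the
    # window checks can_swap_date_fields() before enabling the action.
    return date(d.year, d.day, d.month)

assert swap_day_month(date(2008, 3, 7)) == date(2008, 7, 3)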
from .base import configure_app, create_app import re find_urls = re.compile('http[s]?://(?:[a-zA-Z]|[0-9]|[$-_@.&+]|[!*\(\),]|(?:%[0-9a-fA-F][0-9a-fA-F]))+') class MailTemplatesAuthControllerTests(object): def setup(self): self.app = create_app(self.app_config, True) class TestMailTemplatesAuthControllerSQLA(MailTemplatesAuthControllerTests): @classmethod def setupClass(cls): cls.app_config = configure_app('sqlalchemy') class TestMailTemplatesAuthControllerMing(MailTemplatesAuthControllerTests): @classmethod def setupClass(cls): cls.app_config = configure_app('ming')
axant/tgapp-mailtemplates
tests/test_controller_auth.py
Python
mit
626
0.007987
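The find_urls pattern in the test module above is defined but never exercised by these tests; a quick illustrative check (the sample markup is invented, the pattern is copied verbatim):

import re

find_urls = re.compile('http[s]?://(?:[a-zA-Z]|[0-9]|[$-_@.&+]|[!*\(\),]|(?:%[0-9a-fA-F][0-9a-fA-F]))+')
body = '<a href="https://example.com/reset?token=abc">Reset password</a>'
print(find_urls.findall(body))  # ['https://example.com/reset?token=abc']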
# $Id$ # # Copyright (C) 2001-2008 Greg Landrum and Rational Discovery LLC # All Rights Reserved # """ Automatic search for quantization bounds This uses the expected informational gain to determine where quantization bounds should lie. **Notes**: - bounds are less than, so if the bounds are [1.,2.], [0.9,1.,1.1,2.,2.2] -> [0,1,1,2,2] """ from __future__ import print_function import numpy from rdkit.ML.InfoTheory import entropy from rdkit.six.moves import zip, map, range try: from rdkit.ML.Data import cQuantize except: hascQuantize = 0 else: hascQuantize = 1 _float_tol = 1e-8 def feq(v1,v2,tol=_float_tol): """ floating point equality with a tolerance factor **Arguments** - v1: a float - v2: a float - tol: the tolerance for comparison **Returns** 0 or 1 """ return abs(v1-v2) < tol def FindVarQuantBound(vals,results,nPossibleRes): """ Uses FindVarMultQuantBounds, only here for historic reasons """ bounds,gain = FindVarMultQuantBounds(vals,1,results,nPossibleRes) return (bounds[0],gain) def _GenVarTable(vals,cuts,starts,results,nPossibleRes): """ Primarily intended for internal use constructs a variable table for the data passed in The table for a given variable records the number of times each possible value of that variable appears for each possible result of the function. **Arguments** - vals: a 1D Numeric array with the values of the variables - cuts: a list with the indices of the quantization bounds (indices are into _starts_ ) - starts: a list of potential starting points for quantization bounds - results: a 1D Numeric array of integer result codes - nPossibleRes: an integer with the number of possible result codes **Returns** the varTable, a 2D Numeric array which is nVarValues x nPossibleRes **Notes** - _vals_ should be sorted! """ nVals = len(cuts)+1 varTable = numpy.zeros((nVals,nPossibleRes),'i') idx = 0 for i in range(nVals-1): cut = cuts[i] while idx < starts[cut]: varTable[i,results[idx]] += 1 idx += 1 while idx < len(vals): varTable[-1,results[idx]] += 1 idx += 1 return varTable def _PyRecurseOnBounds(vals,cuts,which,starts,results,nPossibleRes,varTable=None): """ Primarily intended for internal use Recursively finds the best quantization boundaries **Arguments** - vals: a 1D Numeric array with the values of the variables, this should be sorted - cuts: a list with the indices of the quantization bounds (indices are into _starts_ ) - which: an integer indicating which bound is being adjusted here (and index into _cuts_ ) - starts: a list of potential starting points for quantization bounds - results: a 1D Numeric array of integer result codes - nPossibleRes: an integer with the number of possible result codes **Returns** - a 2-tuple containing: 1) the best information gain found so far 2) a list of the quantization bound indices ( _cuts_ for the best case) **Notes** - this is not even remotely efficient, which is why a C replacement was written """ nBounds = len(cuts) maxGain = -1e6 bestCuts = None highestCutHere = len(starts) - nBounds + which if varTable is None: varTable = _GenVarTable(vals,cuts,starts,results,nPossibleRes) while cuts[which] <= highestCutHere: varTable = _GenVarTable(vals,cuts,starts,results,nPossibleRes) gainHere = entropy.InfoGain(varTable) if gainHere > maxGain: maxGain = gainHere bestCuts = cuts[:] # recurse on the next vars if needed if which < nBounds-1: gainHere,cutsHere=_RecurseOnBounds(vals,cuts[:],which+1,starts,results,nPossibleRes, varTable = varTable) if gainHere > maxGain: maxGain = gainHere bestCuts = cutsHere # update this cut cuts[which] += 
1 for i in range(which+1,nBounds): if cuts[i] == cuts[i-1]: cuts[i] += 1 return maxGain,bestCuts def _NewPyRecurseOnBounds(vals,cuts,which,starts,results,nPossibleRes,varTable=None): """ Primarily intended for internal use Recursively finds the best quantization boundaries **Arguments** - vals: a 1D Numeric array with the values of the variables, this should be sorted - cuts: a list with the indices of the quantization bounds (indices are into _starts_ ) - which: an integer indicating which bound is being adjusted here (and index into _cuts_ ) - starts: a list of potential starting points for quantization bounds - results: a 1D Numeric array of integer result codes - nPossibleRes: an integer with the number of possible result codes **Returns** - a 2-tuple containing: 1) the best information gain found so far 2) a list of the quantization bound indices ( _cuts_ for the best case) **Notes** - this is not even remotely efficient, which is why a C replacement was written """ nBounds = len(cuts) maxGain = -1e6 bestCuts = None highestCutHere = len(starts) - nBounds + which if varTable is None: varTable = _GenVarTable(vals,cuts,starts,results,nPossibleRes) while cuts[which] <= highestCutHere: gainHere = entropy.InfoGain(varTable) if gainHere > maxGain: maxGain = gainHere bestCuts = cuts[:] # recurse on the next vars if needed if which < nBounds-1: gainHere,cutsHere=_RecurseOnBounds(vals,cuts[:],which+1,starts,results,nPossibleRes, varTable = None) if gainHere > maxGain: maxGain = gainHere bestCuts = cutsHere # update this cut oldCut = cuts[which] cuts[which] += 1 bot = starts[oldCut] if oldCut+1 < len(starts): top = starts[oldCut+1] else: top = starts[-1] for i in range(bot,top): v = results[i] varTable[which,v] += 1 varTable[which+1,v] -= 1 for i in range(which+1,nBounds): if cuts[i] == cuts[i-1]: cuts[i] += 1 return maxGain,bestCuts # -------------------------------- # # find all possible dividing points # # There are a couple requirements for a dividing point: # 1) the dependent variable (descriptor) must change across it, # 2) the result score must change across it # # So, in the list [(0,0),(1,0),(1,1),(2,1)]: # we should divide before (1,0) and (2,1) # # -------------------------------- def _NewPyFindStartPoints(sortVals,sortResults,nData): startNext = [] tol = 1e-8 blockAct=sortResults[0] lastBlockAct=None lastDiv=None i = 1 while i<nData: # move to the end of this block: while i<nData and sortVals[i]-sortVals[i-1]<=tol: if sortResults[i] != blockAct: # this block is heterogeneous blockAct=-1 i+=1 if lastBlockAct is None: # first time through: lastBlockAct = blockAct lastDiv = i else: if blockAct==-1 or lastBlockAct==-1 or blockAct!=lastBlockAct: startNext.append(lastDiv) lastDiv = i lastBlockAct = blockAct else: lastDiv=i if i<nData: blockAct=sortResults[i] i+=1 # catch the case that the last point also sets a bin: if blockAct != lastBlockAct : startNext.append(lastDiv) return startNext def FindVarMultQuantBounds(vals,nBounds,results,nPossibleRes): """ finds multiple quantization bounds for a single variable **Arguments** - vals: sequence of variable values (assumed to be floats) - nBounds: the number of quantization bounds to find - results: a list of result codes (should be integers) - nPossibleRes: an integer with the number of possible values of the result variable **Returns** - a 2-tuple containing: 1) a list of the quantization bounds (floats) 2) the information gain associated with this quantization """ assert len(vals) == len(results), 'vals/results length mismatch' nData = 
len(vals) if nData == 0: return [],-1e8 # sort the variable values: svs = list(zip(vals,results)) svs.sort() sortVals,sortResults = zip(*svs) startNext=_FindStartPoints(sortVals,sortResults,nData) if not len(startNext): return [0],0.0 if len(startNext)<nBounds: nBounds = len(startNext)-1 if nBounds == 0: nBounds=1 initCuts = list(range(nBounds)) maxGain,bestCuts = _RecurseOnBounds(sortVals,initCuts,0,startNext, sortResults,nPossibleRes) quantBounds = [] nVs = len(sortVals) for cut in bestCuts: idx = startNext[cut] if idx == nVs: quantBounds.append(sortVals[-1]) elif idx == 0: quantBounds.append(sortVals[idx]) else: quantBounds.append((sortVals[idx]+sortVals[idx-1])/2.) return quantBounds,maxGain #hascQuantize=0 if hascQuantize: _RecurseOnBounds = cQuantize._RecurseOnBounds _FindStartPoints = cQuantize._FindStartPoints else: _RecurseOnBounds = _NewPyRecurseOnBounds _FindStartPoints = _NewPyFindStartPoints if __name__ == '__main__': import sys if 1: d = [(1.,0), (1.1,0), (1.2,0), (1.4,1), (1.4,0), (1.6,1), (2.,1), (2.1,0), (2.1,0), (2.1,0), (2.2,1), (2.3,0)] varValues = list(map(lambda x:x[0],d)) resCodes = list(map(lambda x:x[1],d)) nPossibleRes = 2 res = FindVarMultQuantBounds(varValues,2,resCodes,nPossibleRes) print('RES:',res) target = ([1.3, 2.05],.34707 ) else: d = [(1.,0), (1.1,0), (1.2,0), (1.4,1), (1.4,0), (1.6,1), (2.,1), (2.1,0), (2.1,0), (2.1,0), (2.2,1), (2.3,0)] varValues = list(map(lambda x:x[0],d)) resCodes = list(map(lambda x:x[1],d)) nPossibleRes =2 res = FindVarMultQuantBounds(varValues,1,resCodes,nPossibleRes) print(res) #sys.exit(1) d = [(1.4,1), (1.4,0)] varValues = list(map(lambda x:x[0],d)) resCodes = list(map(lambda x:x[1],d)) nPossibleRes =2 res = FindVarMultQuantBounds(varValues,1,resCodes,nPossibleRes) print(res) d = [(1.4,0), (1.4,0),(1.6,1)] varValues = list(map(lambda x:x[0],d)) resCodes = list(map(lambda x:x[1],d)) nPossibleRes =2 res = FindVarMultQuantBounds(varValues,2,resCodes,nPossibleRes) print(res)
strets123/rdkit
rdkit/ML/Data/Quantize.py
Python
bsd-3-clause
10,583
0.02882
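For a concrete sense of the information gain that FindVarMultQuantBounds maximizes, here is the gain for a single candidate bound at 1.3 on the demo data in the module's __main__ block, computed with plain Python rather than the rdkit entropy helpers. The 12 points split [8, 4] by class overall; the 3 points below the bound are all class 0 and the 9 above split [5, 4]:

import math

def entropy(counts):
    n = float(sum(counts))
    return -sum(c / n * math.log(c / n, 2) for c in counts if c)

parent, left, right = [8, 4], [3, 0], [5, 4]
n = sum(parent)
gain = (entropy(parent)
        - sum(left) / float(n) * entropy(left)
        - sum(right) / float(n) * entropy(right))
print(round(gain, 3))  # 0.175

The module's own two-bound demo reaches 0.34707, so adding the second bound at 2.05 roughly doubles the gain over this single cut.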
import errno import signal import os import socket import time SERVER_ADDRESS = (HOST, PORT) = '', 8888 REQUEST_QUEUE_SIZE = 1024 def grim_reaper(signum, frame): while True: try: pid, status = os.waitpid( -1, os.WNOHANG, ) except OSError: return if pid == 0: return def handle_request(client_connection): request = client_connection.recv(1024) print 'Child PID: {pid}. Parent PID: {ppid}'.format(pid=os.getpid(), ppid=os.getppid()) print request.decode() http_response = '''\ HTTP/1.1 200 OK Hello World! ''' client_connection.sendall(http_response) time.sleep(3) def serve_forever(): listen_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM) listen_socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) listen_socket.bind(SERVER_ADDRESS) listen_socket.listen(REQUEST_QUEUE_SIZE) print 'Serving HTTP on port {port} ...'.format(port=PORT) print 'Parent PID (PPID): {pid}\n'.format(pid=os.getpid()) signal.signal(signal.SIGCHLD, grim_reaper) while True: try: client_connection, client_address = listen_socket.accept() except IOError as e: code, msg = e.args if code == errno.EINTR: continue else: raise pid = os.fork() if pid == 0: listen_socket.close() handle_request(client_connection) client_connection.close() os._exit(0) else: client_connection.close() if __name__ == '__main__': serve_forever()
zenanhu/pluto
hydra/server1.py
Python
apache-2.0
1,675
0.001194
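A small companion client is handy for watching several children get forked and then reaped by the SIGCHLD handler. This is a sketch in Python 3 (the server above is Python 2) and assumes the server is already listening on localhost:8888:

import socket
from concurrent.futures import ThreadPoolExecutor

def fetch(_):
    # Each connection forces the parent to fork one child.
    with socket.create_connection(('localhost', 8888)) as s:
        s.sendall(b'GET / HTTP/1.1\r\nHost: localhost\r\n\r\n')
        return s.recv(1024)

with ThreadPoolExecutor(max_workers=3) as pool:
    for reply in pool.map(fetch, range(3)):
        print(reply.decode())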
#!/usr/bin/env python #::::::::::::::::::::::::::::::::::::::::::::::::::::: #Author: Damiano Barboni <damianobarboni@gmail.com> #Version: 0.1 #Description: Script used to test bandsharing.py #Changelog: Wed Jun 11 12:07:33 CEST 2014 # First test version # #::::::::::::::::::::::::::::::::::::::::::::::::::::: import os import sys import shutil import unittest current_path = os.path.realpath( __file__ ).split( os.path.basename(__file__) )[0] bandsharing_path = os.path.abspath(os.path.join( current_path, os.pardir)) sys.path.insert(1, bandsharing_path) class TestBandSharing( unittest.TestCase ): def setUp(self): pass def test_bs( self ): pass def test_csv( self ): pass def makeSuite(): suite = unittest.TestSuite() suite.addTest( TestBandSharing( 'test_bs' ) ) suite.addTest( TestBandSharing( 'test_csv' ) ) return suite if __name__ == "__main__": unittest.TextTestRunner(verbosity=3).run(makeSuite())
beardbig/bandsharing
tests/bandsharing_tests.py
Python
gpl-2.0
1,065
0.030047
import scrapy
import re
from research.items import ResearchItem
import sys
reload(sys)
sys.setdefaultencoding('utf-8')

class CaltechSpider(scrapy.Spider):
    name = "WISC"
    allowed_domains = ["cs.wisc.edu"]
    start_urls = ["https://www.cs.wisc.edu/research/groups"]

    def parse(self, response):
        for sel in response.xpath('//table[@class="views-table cols-2"]'):
            # Build a fresh item per group; reusing a single ResearchItem
            # instance across iterations would make every yielded item
            # alias the last group's data.
            item = ResearchItem()
            item['groupname'] = sel.xpath('caption/text()').extract()[0]
            item['proflist'] = []
            for selp in sel.xpath('.//div[@class="views-field views-field-name-1"]/span/a'):
                tmpname = selp.xpath('text()').extract()
                print str(tmpname)
                item['proflist'].append(tmpname)
            yield item
doge-search/webdoge
liqian/WISC/research/research/spiders/WISCSpider.py
Python
unlicense
678
0.022124
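The reason parse() above must build a fresh ResearchItem per table (as fixed) is that scrapy items behave like dicts: yielding one mutated instance makes every downstream row alias the last group. A plain-dict illustration of the trap:

shared = {}
rows = []
for name in ['Theory', 'Systems']:
    shared['groupname'] = name
    rows.append(shared)                     # every element is the same dict
print([r['groupname'] for r in rows])       # ['Systems', 'Systems']

rows = [{'groupname': name} for name in ['Theory', 'Systems']]
print([r['groupname'] for r in rows])       # ['Theory', 'Systems']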
#!/usr/bin/python # -*- coding: utf-8 -*- from flask import Flask, jsonify, request app = Flask(__name__) from charge.chargeManager import ChargeManager from data.dataProvider import DataProvider @app.route('/') def hello_world(): return jsonify(testPreMa(['棉花'],20)) @app.route('/result') def get_result(): name = request.args.get('name').encode('utf-8') print name return jsonify(testPreMa([name], 20)) def testPreMa(nameArray,period): for name in nameArray: print 'preMa----------------%s--%d周期-------------------' % (name, period) dp = DataProvider(name=name) p_list = dp.getData(['date', 'close']) cm = ChargeManager(p_list, period, nodeStat=False) cm.startCharge('preMa') return cm.resultJson() if __name__ == '__main__': app.run(host='localhost')
AlexYang1949/FuturesMeasure
restful/restfulApi.py
Python
mit
837
0.012063
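A hypothetical client call against the /result route above, assuming the app is running via app.run(host='localhost') on Flask's default port 5000 and that the requests package is installed:

import requests

resp = requests.get('http://localhost:5000/result', params={'name': u'棉花'})
print(resp.json())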
from annoying.functions import get_object_or_None
from django.contrib import messages
from django.core.urlresolvers import reverse
from django.db import transaction
from django.db.models import Q
from django.http.response import JsonResponse
from django.http import HttpResponseRedirect
from django.shortcuts import get_object_or_404, render
from django.utils.decorators import method_decorator
from django.utils.functional import SimpleLazyObject
from django.views.decorators.csrf import csrf_protect
from django.views.generic import ListView
from django.views.generic.base import RedirectView, TemplateView
from django.views.generic.detail import DetailView
from django.views.generic.edit import CreateView, UpdateView

from core.mixins import CustomLoginRequiredMixin, HeadOfDepartmentMixin
from django.core.mail import EmailMessage
from core.utils import WordFilter
from projects import constants
from projects.abstract_models import ProjectAttachment
from projects.forms import ProjectNewForm, ProjectEditForm, ProjectContributeForm, ProjectQuestionForm, ProjectAnswerForm
from projects.mixins import ApprovedProjectRequiredMixin, ProjectAddQuestionMixin
from users.models import User

from mixins import ProjectEditMixin
from .models import Project, ProjectRole, ProjectRating, ProjectTechnicalRequest, ProjectFavorite


class ProjectList(ListView):
    """
    Display list of projects.
    If a project is unapproved, it will only be returned to HeadOfDepartment.
    """
    model = Project
    template_name = 'projects/list.html'
    context_object_name = 'projects'
    ordering = ['-created']


class UserProjectList(ProjectList):
    """
    Displays a list of user projects, by filtering the current project list.
    """
    # TODO If any project is unapproved, it will only be visible to
    # HeadOfDepartment
    template_name = 'projects/user-list.html'

    def get_queryset(self):
        return self.model.objects.filter(user=self.kwargs['user'])

    def get_context_data(self, **kwargs):
        context = super(UserProjectList, self).get_context_data(**kwargs)
        if type(self.kwargs['user']) != SimpleLazyObject:
            context['user'] = get_object_or_404(User, id=self.kwargs['user'])
        return context


class ProjectDetail(DetailView):
    """
    Display project details.
    If a project is unapproved, it will only be returned to HeadOfDepartment.
    A 404 message will be thrown otherwise.
    """
    # TODO If unapproved, only visible to HeadOfDepartment.
    model = Project
    template_name = 'projects/detail.html'
    context_object_name = 'project'

    def get_object(self, queryset=None):
        obj = get_object_or_404(self.model, pk=self.kwargs['pk'])
        obj.increase_visits()
        obj.save()
        return obj

    def get_context_data(self, **kwargs):
        context = super(ProjectDetail, self).get_context_data(**kwargs)
        context['user_project_rating'] = \
            get_object_or_None(ProjectRating, project=self.kwargs['pk'],
                               user=self.request.user) \
            if self.request.user.is_authenticated() else None
        context['user_project_role'] = \
            get_object_or_None(ProjectRole, project=self.kwargs['pk'],
                               user=self.request.user)
        return context


class SearchProject(ListView):
    """
    Display project search.
    If a user inputs a project's title, matching projects are shown.
    Otherwise, no results are shown.
    """
    model = Project
    context_object_name = 'projects'
    template_name = 'projects/list.html'

    def get_queryset(self):
        filter = self.kwargs['title']
        search = self.model.objects.filter(title__icontains=filter)
        return search


class ProjectNewView(CustomLoginRequiredMixin, CreateView):
    template_name = 'projects/new.html'
    form_class = ProjectNewForm
    success_url = 'projects:pending-approval'

    def _create_owner_role(self, project, user):
        """ Owner is always a scientist of the project """
        project_role = ProjectRole(project=project,
                                   user=user,
                                   role=constants.ProjectRoles.SCIENTIST,
                                   approved_role=True)
        project_role.save()

    def form_valid(self, form):
        project = form.save(commit=False)
        project.user = self.request.user
        form.instance.title = WordFilter().clean(form.instance.title)
        form.instance.body = WordFilter().clean(form.instance.body)
        project.save()

        # save attachments
        for each in form.cleaned_data['attachments']:
            ProjectAttachment.objects.create(project=project, object=each)

        # create owner role
        self._create_owner_role(project, project.user)

        messages.success(self.request, project.title)
        return HttpResponseRedirect(reverse(self.success_url))


class ProjectPendingApproval(TemplateView):
    """
    Basic view to display that a project is "now pending to be approved".
    """
    template_name = 'projects/pending-approval.html'


class ProjectApproveList(HeadOfDepartmentMixin, ListView):
    """
    Displays a list of projects pending to be approved.
    Only accessible through a HeadOfDepartment account.
    """
    model = Project
    template_name = 'projects/approve-list.html'
    context_object_name = 'projects'

    def get_queryset(self):
        return self.model.objects.filter(awaiting_approval=True)


class ProjectApproveDeny(HeadOfDepartmentMixin, RedirectView):
    """
    Generic redirect view to handle the action approve / deny, which can
    only be accessed through a HeadOfDepartment account.
    """
    pattern_name = 'projects:approve-list'
    approve = False

    def get_redirect_url(self, **kwargs):
        project = get_object_or_404(Project, pk=kwargs['pk'])
        project.awaiting_approval = False
        project.approved = self.approve
        project.save()
        return reverse(self.pattern_name)


class ProjectApproveView(ProjectApproveDeny):
    approve = True


class ProjectDenyView(ProjectApproveDeny):
    approve = False


class ProjectContributeView(CustomLoginRequiredMixin,
                            ApprovedProjectRequiredMixin, CreateView):
    """
    Form through which any logged in user can request to participate in an
    approved project.
    If the project has already received a petition from the user, and it is
    still pending to be reviewed, it will ignore the petition.
    """
    template_name = 'projects/contribute.html'
    form_class = ProjectContributeForm
    success_url = 'projects:contribute'

    def get_initial(self):
        return {
            'project': self.kwargs['pk'],
        }

    def form_valid(self, form):
        if self._previous_role_petition():
            url_contribute = reverse('projects:contribute',
                                     kwargs={'pk': self.kwargs['pk']})
            return HttpResponseRedirect(url_contribute)

        project_role = form.save(commit=False)
        project_role.user = self.request.user
        project_role.save()
        url_project = reverse(self.success_url,
                              kwargs={'pk': self.kwargs['pk']})
        return HttpResponseRedirect(url_project)

    def _previous_role_petition(self):
        """
        No duplicate role petitions per project can be recorded. Will
        return to the contribute view if so.
""" petition = get_object_or_None(ProjectRole, user=self.request.user, project=self.kwargs['pk']) return petition is not None def get_context_data(self, **kwargs): context = super(ProjectContributeView, self).get_context_data(**kwargs) project = Project.objects.get(pk=self.kwargs['pk']) project_role = get_object_or_None(project.projectrole_set, user=self.request.user) context['project'] = project context['project_role'] = project_role return context class ProjectApproveContributionList(CustomLoginRequiredMixin, UserProjectList): """ Displays a list of user projects, that have users who are pending a contribution approval. """ # TODO project owner required template_name = 'projects/approve-contribution-list.html' context_object_name = 'projectroles' def get_queryset(self): # retrieve projects from the logged in user self.kwargs['user'] = self.request.user user_projects = super(ProjectApproveContributionList, self).get_queryset() # set up filters for an OR unique query by projects and not approved yet filters = reduce(lambda q, x: q | Q(project=x), user_projects, Q()) filters = (filters) & Q(approved_role=False) return ProjectRole.objects.filter(filters) class ProjectContributionApproveDeny(RedirectView): """ View that will handle approval / denial of project contribution. Only the project owner can do so. Views inheriting this one can make use of self.projectrole to handle further operations. """ # TODO project owner required # TODO POST request pattern_name = 'projects:approve-contributions-list' def __init__(self): self.projectrole = ProjectRole.objects.none() def get_redirect_url(self, **kwargs): project = get_object_or_404(Project, pk=kwargs['pk']) user = get_object_or_404(User, pk=kwargs['user']) self.projectrole = get_object_or_404(ProjectRole, project=project, user=user) return reverse(self.pattern_name) class ProjectContributionApproveView(ProjectContributionApproveDeny): """ View that will handle the approval of project, given that the project exists and the user is its owner. Approved attribute will be set to true. """ def get_redirect_url(self, **kwargs): redirect = super(ProjectContributionApproveView, self)\ .get_redirect_url(**kwargs) self.projectrole.approved_role = True self.projectrole.save() return redirect class ProjectContributionDenyView(ProjectContributionApproveDeny): """ View that will handle the denial of project, given that the project exists and the user is its owner. Petition will be removed. 
""" def get_redirect_url(self, **kwargs): redirect = super(ProjectContributionDenyView, self)\ .get_redirect_url(**kwargs) self.projectrole.delete() return redirect class ProjectEdit(ProjectEditMixin): # TODO not edit user template_name = 'projects/edit.html' context_object_name = 'project' form_class = ProjectEditForm def form_valid(self, form): # word filters form.instance.body = WordFilter().clean(form.instance.body) # save attachments for each in form.cleaned_data['attachments']: ProjectAttachment.objects.create(project=form.instance, object=each) return super(ProjectEdit, self).form_valid(form) def get_success_url(self): project = Project.objects.get(pk=self.kwargs['pk']) user = self.request.user email = EmailMessage('Simple Technical Request', 'Your project '+str(project.title)+' has been edited by '+ str(user.first_name) , to =[project.user]) email.send() return reverse('projects:detail', args=[self.kwargs['pk']]) class ProjectTechnicalRequestList(ListView): model = ProjectTechnicalRequest context_object_name = 'questions' template_name = 'projects/technical-requests.html' ordering = ['-created'] def get_queryset(self): return self.model.objects.filter(project=self.kwargs['project']) def get_context_data(self, **kwargs): context = super(ProjectTechnicalRequestList, self).get_context_data(**kwargs) context['project'] = get_object_or_404(Project, id=self.kwargs['project']) return context class ProjectTechnicalRequestNewView(ProjectAddQuestionMixin): template_name = 'projects/technical-request-new.html' form_class = ProjectQuestionForm def form_valid(self, form): form.instance.question = WordFilter().clean(form.instance.question) return super(ProjectTechnicalRequestNewView, self).form_valid(form) def get_success_url(self): return reverse('projects:technical-requests', args=[self.kwargs['project']]) def get_context_data(self, **kwargs): context = super(ProjectTechnicalRequestNewView, self)\ .get_context_data(**kwargs) context['project'] = get_object_or_404(Project, id=self.kwargs['project']) return context class ProjectTechnicalRequestAnswerDetail(DetailView): model = ProjectTechnicalRequest template_name = 'projects/technical-request-answer.html' context_object_name = 'question' def get_context_data(self, **kwargs): context = super(ProjectTechnicalRequestAnswerDetail, self)\ .get_context_data(**kwargs) return context class ProjectTechnicalRequestAnswerNewView(UpdateView): model = ProjectTechnicalRequest template_name = 'projects/technical-request-answer-new.html' context_object_name = 'question' form_class = ProjectAnswerForm def form_valid(self, form): form.instance.answer = WordFilter().clean(form.instance.answer) return super(ProjectTechnicalRequestAnswerNewView, self).form_valid(form) def get_success_url(self): question = ProjectTechnicalRequest.objects.get(pk=self.kwargs['pk']) question.replied = True question.save() return reverse('projects:technical-request-answer', args=[self.kwargs['project'], self.kwargs['pk']]) class VoteView(CustomLoginRequiredMixin, ApprovedProjectRequiredMixin, RedirectView): """ Handles a project vote (either up or down). 
""" pattern_name = 'projects:detail' def get_vote(self): self.project = get_object_or_404(Project, id=self.kwargs['pk']) self.user = self.request.user current_project_rating = get_object_or_None(ProjectRating, project=self.project, user=self.user) return current_project_rating if current_project_rating is not None \ else ProjectRating(project=self.project, user=self.user) def post(self, request, *args, **kwargs): project = get_object_or_404(Project, id=self.kwargs['pk']) return JsonResponse({'upvotes': project.upvotes, 'downvotes': project.downvotes}) def get_redirect_url(self, *args, **kwargs): return reverse(self.pattern_name, args=[self.kwargs['pk']]) class UpvoteView(VoteView): @method_decorator(csrf_protect) @transaction.atomic def post(self, request, *args, **kwargs): project_rating = self.get_vote() if project_rating.rating is None: # new vote => +1 total upvotes self.project.upvotes += 1 self.project.save() elif project_rating.is_downvoted(): # old vote => -1 total downvotes +1 total upvotes self.project.downvotes -= 1 self.project.upvotes += 1 self.project.save() project_rating.upvote() project_rating.save() return super(UpvoteView, self).post(request, *args, **kwargs) class DownvoteView(VoteView): @method_decorator(csrf_protect) @transaction.atomic def post(self, request, *args, **kwargs): project_rating = self.get_vote() if project_rating.rating is None: # new vote => +1 total downvotes self.project.downvotes += 1 self.project.save() elif project_rating.is_upvoted(): # old vote => -1 total upvotes +1 total downvotes self.project.upvotes -= 1 self.project.downvotes += 1 self.project.save() project_rating.downvote() project_rating.save() return super(DownvoteView, self).post(request, *args, **kwargs) class FavoritesView(ListView): template_name = 'projects/favorites.html' model = ProjectFavorite context_object_name = 'favorites' def queryset(self): return self.model.objects.filter(user=self.request.user) def favorite_create(request): if request.method == 'POST': project_id = request.POST.get('project') project = Project.objects.get(pk=project_id) user = request.user try: ProjectFavorite.objects.get(project=project, user=user) return render(request, 'projects/favorites.html', {'exist': '1', 'project': project.title}) except: favorite = ProjectFavorite(project=project, user=user) favorite.save() return render(request, 'projects/favorites.html', {'added': '1', 'project': project.title}) def favorite_delete(request): project_id = request.POST.get('project') project = Project.objects.get(pk=project_id) favorite = ProjectFavorite.objects.get(project=project, user=request.user) favorite.delete() return HttpResponseRedirect('/projects/favorites') class NotificationsView(ListView): template_name = 'projects/notifications.html' model = ProjectTechnicalRequest context_object_name = 'questions' def queryset(self): return self.model.objects.filter(to_user=self.request.user, replied=False) def revokeproject(request): project_id = request.POST.get('project') project = Project.objects.get(pk=project_id) projectrole = ProjectRole.objects.get(project=project, user= request.user) projectrole.delete() return HttpResponseRedirect('/projects/'+ project_id)
zurfyx/simple
simple/projects/views.py
Python
mit
17,984
0.001335
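The UpvoteView/DownvoteView bookkeeping in the file above is easiest to audit as a pure state transition. This standalone sketch (names are illustrative, not part of the app) mirrors the upvote branch:

def apply_upvote(prev_rating, upvotes, downvotes):
    # prev_rating: None (no vote yet), 'up' or 'down'
    if prev_rating is None:
        upvotes += 1                 # first vote: +1 upvote
    elif prev_rating == 'down':
        downvotes -= 1               # flipped vote: move one tally across
        upvotes += 1
    return 'up', upvotes, downvotes  # re-upvoting an upvote changes nothing

print(apply_upvote(None, 0, 0))      # ('up', 1, 0)
print(apply_upvote('down', 1, 1))    # ('up', 2, 0)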
import numpy as np from scipy import sparse import statistics def metrics(data_file): output_str = '' dsms = [] # the first thing we do is load the csv file (file #,from,to) # into a DSM; we do this by converting the triples to a sparse matrix # dsm is the first-order DSM of dependencies dsm_initial = loadDsm(data_file) # calculate the visibility matrices for all path lengths dsms = raiseAllPowers(dsm_initial, -1) # data file name output_str = output_str + data_file + ',' # the final visibility matrix dsm_visibility = dsms[len(dsms) - 1] # number of files output_str = output_str + str(len(dsm_initial)) + ',' # now, get the fan-in and fan-out data and calculate the density and propagation cost [fi,fo] = getFiFo(dsm_initial) [vfi,vfo] = getFiFo(dsm_visibility) # get median values of vfi/vfo to put file counts into four buckets arr_vfo = np.array(vfo).flatten() arr_vfi = np.array(vfi).flatten() arr_fo = np.array(fo).flatten() arr_fi = np.array(fi).flatten() density = (np.count_nonzero(dsm_initial) / len(dsm_initial) / len(dsm_initial)) * 100 output_str = output_str + str(density / 100) + ',' propagation_cost = sum(arr_vfo) / (len(dsm_initial) * len(dsm_initial)) * 100 output_str = output_str + str(propagation_cost / 100) + ',' vfo_median = statistics.median(arr_vfo) vfi_median = statistics.median(list(filter(lambda x: x != 0, arr_vfi))) vfo_mean = statistics.mean(np.array(arr_vfo).flatten()) vfi_mean = statistics.mean(list(filter(lambda x: x != 0, arr_vfi))) vfi_mode = statistics.mode(arr_vfi) vfo_mode = statistics.mode(arr_vfo) fo_median = statistics.median(arr_fo) fi_median = statistics.median(arr_fi) control_size = 0 # high vfo, low vfi core_size = 0 # high vfo, high vfi peripheral_size = 0 # low vfo, low vfi shared_size = 0 # low vfo, high vfi for i, val in enumerate(vfi): # base the cutoff points on the visibility matrix rather than first-order matrix # otherwise, we'd use fi, fo, fi_median and fo_median if vfi[i] >= vfi_median and vfo[i] >= vfo_median: core_size += 1 elif vfi[i] < vfi_median and vfo[i] < vfo_median: peripheral_size += 1 elif vfi[i] <= vfi_median and vfo[i] > vfo_median: control_size += 1 elif vfi[i] > vfi_median and vfo[i] <= vfo_median: shared_size += 1 print('vfo mean: ', vfo_mean) print('vfi mean: ', vfi_mean) print('vfo median: ', vfo_median) print('vfi median: ', vfi_median) print('vfi mode: ', vfi_mode) print('fo median: ', fo_median) print('fi median: ', fi_median) print('core: ', core_size) print('peripheral: ', peripheral_size) print('shared: ', shared_size) print('control: ', control_size) print('vfi mode: ', vfi_mode) print('vfo mode ', vfo_mode) output_str = output_str + str(core_size) + ',' output_str = output_str + str(peripheral_size) + ',' output_str = output_str + str(shared_size) + ',' output_str = output_str + str(control_size) + ',' output_str = output_str + str(vfo_median) + ',' output_str = output_str + str(vfi_median) + ',' output_str = output_str + str(fo_median) + ',' output_str = output_str + str(fi_median) return output_str def raiseAllPowers(initial_matrix, max_paths): initial_matrix = sparse.csr_matrix(initial_matrix) initial_matrix.data.fill(1) done = 0 current_path_length = 0 matrices = [] if max_paths == -1: max_paths = 1000 matrices.append(initial_matrix) while done == 0 and current_path_length < max_paths: print('Calculating DSM for path length = ', current_path_length + 1) # square the current matrix matrix_squared = matrices[current_path_length] * matrices[current_path_length] # sum the matrix with the previous one matrix_squared = 
matrix_squared + matrices[current_path_length] # sponify the matrix, so that we converge matrix_squared.data.fill(1) # nnz elements print(len(matrix_squared.nonzero()[0]), len(matrices[current_path_length].nonzero()[0])) # when we've achieved the transitive closure of our matrix, we're done if len(matrix_squared.nonzero()[0]) == len(matrices[current_path_length].nonzero()[0]): done = 1 continue else: matrices.append(matrix_squared) current_path_length += 1 return matrices def getFiFo(dsmProp): FI = dsmProp.sum(axis=0) # sum over columns FO = dsmProp.sum(axis=1) # sum over rows FI = FI.transpose() return [FI, FO] # credit https://gist.github.com/kevinavery/9613505 def loadDsm(filename): DATA = np.loadtxt(filename, delimiter=',') dims = DATA.shape[1] - 1 shape = [np.max(DATA[:,i]) for i in range(dims)] M = np.zeros(shape=shape) for row in DATA: index = tuple(row[:-1] - 1) M.itemset(index, row[-1]) return M
almossawi/firefox-code-quality
scripts/codequality.py
Python
mpl-2.0
4,809
0.02121
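A tiny worked example of what raiseAllPowers() and the propagation-cost line above compute, using dense numpy instead of scipy.sparse. For a three-file chain A -> B -> C, the transitive closure adds the indirect A -> C dependency:

import numpy as np

dsm = np.array([[0, 1, 0],     # A depends on B
                [0, 0, 1],     # B depends on C
                [0, 0, 0]])
vis = dsm.copy()
while True:
    nxt = np.clip(vis @ vis + vis, 0, 1)   # square, sum, then "sponify"
    if np.array_equal(nxt, vis):           # transitive closure reached
        break
    vis = nxt
n = len(dsm)
print(vis.sum() / n ** 2)                  # propagation cost: 3/9 ~ 0.333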
import os DEBUG = True SITE_ID = 1 APP_ROOT = os.path.abspath(os.path.join(os.path.dirname(__file__), '')) DATABASES = { 'default': { 'ENGINE': 'django.db.backends.sqlite3', 'NAME': ':memory:', } } STATIC_URL = '/static/' # STATIC_ROOT = os.path.join(APP_ROOT, '../app_static') STATICFILES_DIRS = ( os.path.join(APP_ROOT, 'static'), ) INSTALLED_APPS = [ 'django.contrib.admin', 'django.contrib.admindocs', 'django.contrib.auth', 'django.contrib.contenttypes', 'django.contrib.messages', 'django.contrib.sessions', 'django.contrib.staticfiles', 'django.contrib.sitemaps', 'django.contrib.sites', 'bower', 'bower.tests.test_app', ] SECRET_KEY = 'foobar' TEST_RUNNER = 'discover_runner.DiscoverRunner'
kitsunde/jack-bower
bower/tests/test_settings.py
Python
mit
783
0
""" Python script to create a new article in a given section id. """ import os import sys from zdesk import Zendesk from scripts import file_constants from colorama import init from colorama import Fore init() def _create_shell(section_id): # Get subdomain. try: subdomain = os.environ["ZENDESK_SUBDOMAIN"] url = file_constants.get_url_from_subdomain(subdomain) except KeyError: print(Fore.RED + "Please set the environment variable ZENDESK_SUBDOMAIN" + Fore.RESET) sys.exit(1) # Get username. try: username = os.environ["ZENDESK_USR"] except KeyError: print(Fore.RED + "Please set the environment variable ZENDESK_USR" + Fore.RESET) sys.exit(1) # Get password. try: password = os.environ["ZENDESK_PWD"] except KeyError: print(Fore.RED + "Please set the environment variable ZENDESK_PWD" + Fore.RESET) sys.exit(1) zendesk = Zendesk(url, username, password) # Add a temporary title and leave it in draft mode. new_article = {"article": {"title": "Temporary Title", "draft": True}} response = zendesk.help_center_section_article_create(id = section_id, data = new_article) # Report success. print('Successfully created the article.') # Create the article shell locally. article_id = response['article']['id'] _empty_article(str(article_id)) def _empty_article(article_id): article = "posts/" + article_id + "/index.html" title = "posts/" + article_id + "/title.html" if article_id.isdigit() and not os.path.isfile(article): # Makes the folder for the article and pictures to be placed in. os.makedirs('posts/' + article_id) # Create the article and title shell. open(article, 'a').close() open(title, 'a').close() # Provides the user with the location of the html file that was created. print "The article is located at " + article print "Enter the article's title at " + title elif os.path.isfile(article): print (Fore.RED + "Error: This article ID already exists: " + article_id + Fore.RESET) sys.exit(1) else: print (Fore.RED + "Error: This article ID is invalid: " + article_id + Fore.RESET) sys.exit(1) def main(): if len(sys.argv) != 2: print('Usage: python %s <section_id>' % sys.argv[0]) else: _create_shell(sys.argv[1])
aerofs/zendesk-help-center-backer
zendesk/create_new_post_shell.py
Python
bsd-3-clause
2,438
0.006563
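The three try/except KeyError blocks in _create_shell() above implement the same guard; factored out it looks like the following sketch (require_env is a made-up helper, not part of the script):

import os
import sys

def require_env(name):
    try:
        return os.environ[name]
    except KeyError:
        # sys.exit with a string prints it to stderr and exits with status 1
        sys.exit('Please set the environment variable ' + name)

subdomain = require_env('ZENDESK_SUBDOMAIN')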
import sys import matplotlib.pyplot as plt import numpy as np import sklearn.gaussian_process import sklearn.kernel_approximation import splitter from appx_gaussian_processes import appx_gp TRAINING_NUM = 1500 TESTING_NUM = 50000 ALPHA = .003 LENGTH_SCALE = 1 GAMMA = .5 / (LENGTH_SCALE ** 2) COMPONENTS = 100 def interval_in_box_from_line(box, line): x_min, x_max, y_min, y_max = box m, b = line x_min_y = m * x_min + b x_max_y = m * x_max + b y_min_x = (y_min - b) / m y_max_x = (y_max - b) / m endpoints = set() if y_min <= x_min_y <= y_max: endpoints.add((x_min, x_min_y)) if y_min <= x_max_y <= y_max: endpoints.add((x_max, x_max_y)) if x_min <= y_min_x <= x_max: endpoints.add((y_min_x, y_min)) if x_min <= y_max_x <= x_max: endpoints.add((y_max_x, y_max)) return endpoints def approximate_kernel(train_X, test_X): sampler = sklearn.kernel_approximation.RBFSampler(gamma=GAMMA, n_components=COMPONENTS) sampler.fit(train_X) appx_train_X = sampler.transform(train_X) appx_test_X = sampler.transform(test_X) return appx_train_X, appx_test_X def main(path_in): print('Loading data...') data = splitter.load(path_in) (train_X, train_y), (test_X, test_y) = splitter.split(data, TRAINING_NUM, TESTING_NUM) try: gp_sigmas = np.loadtxt('gp_preds.txt') assert gp_sigmas.shape == (TESTING_NUM,) except (FileNotFoundError, AssertionError): print('Fitting GP...') kernel = sklearn.gaussian_process.kernels.RBF( length_scale=LENGTH_SCALE) gp = sklearn.gaussian_process.GaussianProcessRegressor( kernel=kernel, alpha=ALPHA, copy_X_train=False) gp.fit(train_X, train_y) print('Predicting GP...') _, gp_sigmas = gp.predict(test_X, return_std=True) np.savetxt('gp_preds.txt', gp_sigmas) print('Approximating kernel...') appx_train_X, appx_test_X = approximate_kernel(train_X, test_X) print('Fitting approximate GP...') agp = appx_gp.AppxGaussianProcessRegressor(alpha=ALPHA) agp.fit(appx_train_X, train_y) print('Predicting approximate GP...') _, agp_sigmas = agp.predict(appx_test_X, return_std=True) print('Finding best fit...') best_fit = np.polyfit(gp_sigmas, agp_sigmas, 1) best_fit_box = (min(gp_sigmas), max(gp_sigmas), min(agp_sigmas), max(agp_sigmas)) best_fit_endpoints = interval_in_box_from_line(best_fit_box, best_fit) best_fit_xs, best_fit_ys = zip(*best_fit_endpoints) print('Plotting...') f = plt.figure() ax = f.add_subplot(111) sc = plt.scatter(gp_sigmas, agp_sigmas, s=.2, c=list(test_y)) plt.plot(best_fit_xs, best_fit_ys, color='red', label='Linear fit') plt.title(r'$\gamma = {:.4},$ #components$= {}$'.format(GAMMA, COMPONENTS)) plt.xlabel('GP uncertainty') plt.ylabel('Approximate GP uncertainty') plt.text(.975, .1, '$y = {:.4}x {:+.4}$'.format(*best_fit), horizontalalignment='right', verticalalignment='bottom', transform = ax.transAxes) colorbar = plt.colorbar(sc) colorbar.set_label('Redshift') plt.legend(loc='lower right') plt.show() if __name__ == '__main__': main(sys.argv[1])
alasdairtran/mclearn
projects/jakub/test_appx_gp.py
Python
bsd-3-clause
3,431
0.002332
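The script above ties the exact and approximate models together through GAMMA = .5 / (LENGTH_SCALE ** 2). A quick sanity check, independent of the survey data, that RBFSampler's random features really approximate the exact RBF kernel exp(-gamma * ||x - y||^2):

import numpy as np
from sklearn.kernel_approximation import RBFSampler

rng = np.random.RandomState(0)
X = rng.randn(2, 3)
gamma = 0.5
# With many components the feature-space inner product converges on the kernel.
Z = RBFSampler(gamma=gamma, n_components=20000, random_state=0).fit_transform(X)
exact = np.exp(-gamma * np.sum((X[0] - X[1]) ** 2))
print(exact, Z[0] @ Z[1])   # the two numbers should be close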
"""Test interact and interactive.""" #----------------------------------------------------------------------------- # Copyright (C) 2014 The IPython Development Team # # Distributed under the terms of the BSD License. The full license is in # the file COPYING, distributed as part of this software. #----------------------------------------------------------------------------- #----------------------------------------------------------------------------- # Imports #----------------------------------------------------------------------------- from __future__ import print_function from collections import OrderedDict import nose.tools as nt import IPython.testing.tools as tt # from IPython.core.getipython import get_ipython from IPython.html import widgets from IPython.html.widgets import interact, interactive, Widget, interaction from IPython.utils.py3compat import annotate #----------------------------------------------------------------------------- # Utility stuff #----------------------------------------------------------------------------- class DummyComm(object): comm_id = 'a-b-c-d' def send(self, *args, **kwargs): pass def close(self, *args, **kwargs): pass _widget_attrs = {} displayed = [] def setup(): _widget_attrs['comm'] = Widget.comm Widget.comm = DummyComm() _widget_attrs['_ipython_display_'] = Widget._ipython_display_ def raise_not_implemented(*args, **kwargs): raise NotImplementedError() Widget._ipython_display_ = raise_not_implemented def teardown(): for attr, value in _widget_attrs.items(): setattr(Widget, attr, value) def f(**kwargs): pass def clear_display(): global displayed displayed = [] def record_display(*args): displayed.extend(args) #----------------------------------------------------------------------------- # Actual tests #----------------------------------------------------------------------------- def check_widget(w, **d): """Check a single widget against a dict""" for attr, expected in d.items(): if attr == 'cls': nt.assert_is(w.__class__, expected) else: value = getattr(w, attr) nt.assert_equal(value, expected, "%s.%s = %r != %r" % (w.__class__.__name__, attr, value, expected) ) def check_widgets(container, **to_check): """Check that widgets are created as expected""" # build a widget dictionary, so it matches widgets = {} for w in container.children: widgets[w.description] = w for key, d in to_check.items(): nt.assert_in(key, widgets) check_widget(widgets[key], **d) def test_single_value_string(): a = u'hello' c = interactive(f, a=a) w = c.children[0] check_widget(w, cls=widgets.TextWidget, description='a', value=a, ) def test_single_value_bool(): for a in (True, False): c = interactive(f, a=a) w = c.children[0] check_widget(w, cls=widgets.CheckboxWidget, description='a', value=a, ) def test_single_value_dict(): for d in [ dict(a=5), dict(a=5, b='b', c=dict), ]: c = interactive(f, d=d) w = c.children[0] check_widget(w, cls=widgets.DropdownWidget, description='d', values=d, value=next(iter(d.values())), ) def test_single_value_float(): for a in (2.25, 1.0, -3.5): c = interactive(f, a=a) w = c.children[0] check_widget(w, cls=widgets.FloatSliderWidget, description='a', value=a, min= -a if a > 0 else 3*a, max= 3*a if a > 0 else -a, step=0.1, readout=True, ) def test_single_value_int(): for a in (1, 5, -3): c = interactive(f, a=a) nt.assert_equal(len(c.children), 1) w = c.children[0] check_widget(w, cls=widgets.IntSliderWidget, description='a', value=a, min= -a if a > 0 else 3*a, max= 3*a if a > 0 else -a, step=1, readout=True, ) def test_list_tuple_2_int(): with 
nt.assert_raises(ValueError): c = interactive(f, tup=(1,1)) with nt.assert_raises(ValueError): c = interactive(f, tup=(1,-1)) for min, max in [ (0,1), (1,10), (1,2), (-5,5), (-20,-19) ]: c = interactive(f, tup=(min, max), lis=[min, max]) nt.assert_equal(len(c.children), 2) d = dict( cls=widgets.IntSliderWidget, min=min, max=max, step=1, readout=True, ) check_widgets(c, tup=d, lis=d) def test_list_tuple_3_int(): with nt.assert_raises(ValueError): c = interactive(f, tup=(1,2,0)) with nt.assert_raises(ValueError): c = interactive(f, tup=(1,2,-1)) for min, max, step in [ (0,2,1), (1,10,2), (1,100,2), (-5,5,4), (-100,-20,4) ]: c = interactive(f, tup=(min, max, step), lis=[min, max, step]) nt.assert_equal(len(c.children), 2) d = dict( cls=widgets.IntSliderWidget, min=min, max=max, step=step, readout=True, ) check_widgets(c, tup=d, lis=d) def test_list_tuple_2_float(): with nt.assert_raises(ValueError): c = interactive(f, tup=(1.0,1.0)) with nt.assert_raises(ValueError): c = interactive(f, tup=(0.5,-0.5)) for min, max in [ (0.5, 1.5), (1.1,10.2), (1,2.2), (-5.,5), (-20,-19.) ]: c = interactive(f, tup=(min, max), lis=[min, max]) nt.assert_equal(len(c.children), 2) d = dict( cls=widgets.FloatSliderWidget, min=min, max=max, step=.1, readout=True, ) check_widgets(c, tup=d, lis=d) def test_list_tuple_3_float(): with nt.assert_raises(ValueError): c = interactive(f, tup=(1,2,0.0)) with nt.assert_raises(ValueError): c = interactive(f, tup=(-1,-2,1.)) with nt.assert_raises(ValueError): c = interactive(f, tup=(1,2.,-1.)) for min, max, step in [ (0.,2,1), (1,10.,2), (1,100,2.), (-5.,5.,4), (-100,-20.,4.) ]: c = interactive(f, tup=(min, max, step), lis=[min, max, step]) nt.assert_equal(len(c.children), 2) d = dict( cls=widgets.FloatSliderWidget, min=min, max=max, step=step, readout=True, ) check_widgets(c, tup=d, lis=d) def test_list_tuple_str(): values = ['hello', 'there', 'guy'] first = values[0] dvalues = OrderedDict((v,v) for v in values) c = interactive(f, tup=tuple(values), lis=list(values)) nt.assert_equal(len(c.children), 2) d = dict( cls=widgets.DropdownWidget, value=first, values=dvalues ) check_widgets(c, tup=d, lis=d) def test_list_tuple_invalid(): for bad in [ (), (5, 'hi'), ('hi', 5), ({},), (None,), ]: with nt.assert_raises(ValueError): print(bad) # because there is no custom message in assert_raises c = interactive(f, tup=bad) def test_defaults(): @annotate(n=10) def f(n, f=4.5, g=1): pass c = interactive(f) check_widgets(c, n=dict( cls=widgets.IntSliderWidget, value=10, ), f=dict( cls=widgets.FloatSliderWidget, value=4.5, ), g=dict( cls=widgets.IntSliderWidget, value=1, ), ) def test_default_values(): @annotate(n=10, f=(0, 10.), g=5, h={'a': 1, 'b': 2}, j=['hi', 'there']) def f(n, f=4.5, g=1, h=2, j='there'): pass c = interactive(f) check_widgets(c, n=dict( cls=widgets.IntSliderWidget, value=10, ), f=dict( cls=widgets.FloatSliderWidget, value=4.5, ), g=dict( cls=widgets.IntSliderWidget, value=5, ), h=dict( cls=widgets.DropdownWidget, values={'a': 1, 'b': 2}, value=2 ), j=dict( cls=widgets.DropdownWidget, values={'hi':'hi', 'there':'there'}, value='there' ), ) def test_default_out_of_bounds(): @annotate(f=(0, 10.), h={'a': 1}, j=['hi', 'there']) def f(f='hi', h=5, j='other'): pass c = interactive(f) check_widgets(c, f=dict( cls=widgets.FloatSliderWidget, value=5., ), h=dict( cls=widgets.DropdownWidget, values={'a': 1}, value=1, ), j=dict( cls=widgets.DropdownWidget, values={'hi':'hi', 'there':'there'}, value='hi', ), ) def test_annotations(): @annotate(n=10, f=widgets.FloatTextWidget()) def 
f(n, f): pass c = interactive(f) check_widgets(c, n=dict( cls=widgets.IntSliderWidget, value=10, ), f=dict( cls=widgets.FloatTextWidget, ), ) def test_priority(): @annotate(annotate='annotate', kwarg='annotate') def f(kwarg='default', annotate='default', default='default'): pass c = interactive(f, kwarg='kwarg') check_widgets(c, kwarg=dict( cls=widgets.TextWidget, value='kwarg', ), annotate=dict( cls=widgets.TextWidget, value='annotate', ), ) @nt.with_setup(clear_display) def test_decorator_kwarg(): with tt.monkeypatch(interaction, 'display', record_display): @interact(a=5) def foo(a): pass nt.assert_equal(len(displayed), 1) w = displayed[0].children[0] check_widget(w, cls=widgets.IntSliderWidget, value=5, ) @nt.with_setup(clear_display) def test_decorator_no_call(): with tt.monkeypatch(interaction, 'display', record_display): @interact def foo(a='default'): pass nt.assert_equal(len(displayed), 1) w = displayed[0].children[0] check_widget(w, cls=widgets.TextWidget, value='default', ) @nt.with_setup(clear_display) def test_call_interact(): def foo(a='default'): pass with tt.monkeypatch(interaction, 'display', record_display): ifoo = interact(foo) nt.assert_equal(len(displayed), 1) w = displayed[0].children[0] check_widget(w, cls=widgets.TextWidget, value='default', ) @nt.with_setup(clear_display) def test_call_interact_kwargs(): def foo(a='default'): pass with tt.monkeypatch(interaction, 'display', record_display): ifoo = interact(foo, a=10) nt.assert_equal(len(displayed), 1) w = displayed[0].children[0] check_widget(w, cls=widgets.IntSliderWidget, value=10, ) @nt.with_setup(clear_display) def test_call_decorated_on_trait_change(): """test calling @interact decorated functions""" d = {} with tt.monkeypatch(interaction, 'display', record_display): @interact def foo(a='default'): d['a'] = a return a nt.assert_equal(len(displayed), 1) w = displayed[0].children[0] check_widget(w, cls=widgets.TextWidget, value='default', ) # test calling the function directly a = foo('hello') nt.assert_equal(a, 'hello') nt.assert_equal(d['a'], 'hello') # test that setting trait values calls the function w.value = 'called' nt.assert_equal(d['a'], 'called') @nt.with_setup(clear_display) def test_call_decorated_kwargs_on_trait_change(): """test calling @interact(foo=bar) decorated functions""" d = {} with tt.monkeypatch(interaction, 'display', record_display): @interact(a='kwarg') def foo(a='default'): d['a'] = a return a nt.assert_equal(len(displayed), 1) w = displayed[0].children[0] check_widget(w, cls=widgets.TextWidget, value='kwarg', ) # test calling the function directly a = foo('hello') nt.assert_equal(a, 'hello') nt.assert_equal(d['a'], 'hello') # test that setting trait values calls the function w.value = 'called' nt.assert_equal(d['a'], 'called') def test_fixed(): c = interactive(f, a=widgets.fixed(5), b='text') nt.assert_equal(len(c.children), 1) w = c.children[0] check_widget(w, cls=widgets.TextWidget, value='text', description='b', ) def test_default_description(): c = interactive(f, b='text') w = c.children[0] check_widget(w, cls=widgets.TextWidget, value='text', description='b', ) def test_custom_description(): c = interactive(f, b=widgets.TextWidget(value='text', description='foo')) w = c.children[0] check_widget(w, cls=widgets.TextWidget, value='text', description='foo', )
WillisXChen/django-oscar
oscar/lib/python2.7/site-packages/IPython/html/widgets/tests/test_interaction.py
Python
bsd-3-clause
13,235
0.013827
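The abbreviation rules these tests pin down can also be seen directly. This sketch uses the same IPython 2.x-era API the test module imports, so it only runs in that environment:

from IPython.html.widgets import interactive

def f(**kwargs):
    pass

# bool -> checkbox, (min, max) tuple -> int slider, str -> text box
w = interactive(f, a=True, b=(0, 10), c='hello')
for child in w.children:
    print(child.description, child.__class__.__name__)
# expected mapping: a CheckboxWidget, b IntSliderWidget, c TextWidget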
# Copyright 2017 Google Inc. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """TensorFlow NMT model implementation.""" from __future__ import print_function import argparse import os import random import sys # import matplotlib.image as mpimg import numpy as np import time import tensorflow as tf from mlperf_compliance import mlperf_log import estimator from utils import evaluation_utils from utils import iterator_utils from utils import misc_utils as utils from utils import vocab_utils from variable_mgr import constants utils.check_tensorflow_version() FLAGS = None def add_arguments(parser): """Build ArgumentParser.""" parser.register("type", "bool", lambda v: v.lower() == "true") # network parser.add_argument( "--num_units", type=int, default=1024, help="Network size.") parser.add_argument( "--num_layers", type=int, default=4, help="Network depth.") parser.add_argument("--num_encoder_layers", type=int, default=None, help="Encoder depth, equal to num_layers if None.") parser.add_argument("--num_decoder_layers", type=int, default=None, help="Decoder depth, equal to num_layers if None.") parser.add_argument( "--encoder_type", type=str, default="gnmt", help="""\ uni | bi | gnmt. For bi, we build num_encoder_layers/2 bi-directional layers. For gnmt, we build 1 bi-directional layer, and (num_encoder_layers - 1) uni-directional layers.\ """) parser.add_argument( "--residual", type="bool", nargs="?", const=True, default=True, help="Whether to add residual connections.") parser.add_argument("--time_major", type="bool", nargs="?", const=True, default=True, help="Whether to use time-major mode for dynamic RNN.") parser.add_argument("--num_embeddings_partitions", type=int, default=0, help="Number of partitions for embedding vars.") # attention mechanisms parser.add_argument( "--attention", type=str, default="normed_bahdanau", help="""\ luong | scaled_luong | bahdanau | normed_bahdanau or set to "" for no attention\ """) parser.add_argument( "--attention_architecture", type=str, default="gnmt_v2", help="""\ standard | gnmt | gnmt_v2. standard: use top layer to compute attention. gnmt: GNMT style of computing attention, use previous bottom layer to compute attention. gnmt_v2: similar to gnmt, but use current bottom layer to compute attention.\ """) parser.add_argument( "--output_attention", type="bool", nargs="?", const=True, default=True, help="""\ Only used in standard attention_architecture. Whether use attention as the cell output at each timestep. .\ """) parser.add_argument( "--pass_hidden_state", type="bool", nargs="?", const=True, default=True, help="""\ Whether to pass encoder's hidden state to decoder when using an attention based model.\ """) # optimizer parser.add_argument( "--optimizer", type=str, default="adam", help="sgd | adam") parser.add_argument( "--learning_rate", type=float, default=5e-4, help="Learning rate. 
Adam: 0.001 | 0.0001") parser.add_argument("--warmup_steps", type=int, default=0, help="How many steps we inverse-decay learning.") parser.add_argument("--warmup_scheme", type=str, default="t2t", help="""\ How to warmup learning rates. Options include: t2t: Tensor2Tensor's way, start with lr 100 times smaller, then exponentiate until the specified lr.\ """) parser.add_argument( "--decay_scheme", type=str, default="", help="""\ How we decay learning rate. Options include: luong234: after 2/3 num train steps, we start halving the learning rate for 4 times before finishing. luong5: after 1/2 num train steps, we start halving the learning rate for 5 times before finishing.\ luong10: after 1/2 num train steps, we start halving the learning rate for 10 times before finishing.\ """) parser.add_argument( "--num_train_steps", type=int, default=100000, help="Num steps to train.") parser.add_argument( "--max_train_epochs", type=int, default=8, help="Max number of epochs.") parser.add_argument("--num_examples_per_epoch", type=int, default=4068191, help="Number of examples in one epoch") parser.add_argument( "--target_bleu", type=float, default=22.0, help="Target bleu.") parser.add_argument("--colocate_gradients_with_ops", type="bool", nargs="?", const=True, default=True, help=("Whether try colocating gradients with " "corresponding op")) parser.add_argument("--label_smoothing", type=float, default=0.1, help=("If nonzero, smooth the labels towards " "1/num_classes.")) # initializer parser.add_argument("--init_op", type=str, default="uniform", help="uniform | glorot_normal | glorot_uniform") parser.add_argument("--init_weight", type=float, default=0.1, help=("for uniform init_op, initialize weights " "between [-this, this].")) # data parser.add_argument( "--src", type=str, default="en", help="Source suffix, e.g., en.") parser.add_argument( "--tgt", type=str, default="de", help="Target suffix, e.g., de.") parser.add_argument( "--data_dir", type=str, default="", help="Training/eval data directory.") parser.add_argument( "--train_prefix", type=str, default="train.tok.clean.bpe.32000", help="Train prefix, expect files with src/tgt suffixes.") parser.add_argument( "--dev_prefix", type=str, default="newstest2014.tok.bpe.32000", help="Dev prefix, expect files with src/tgt suffixes.") parser.add_argument( "--test_prefix", type=str, default="newstest2014.tok.bpe.32000", help="Test prefix, expect files with src/tgt suffixes.") parser.add_argument( "--output_dir", type=str, default="", help="Store log/model files.") # Vocab parser.add_argument( "--vocab_prefix", type=str, default="vocab.bpe.32000", help="""\ Vocab prefix, expect files with src/tgt suffixes.\ """) parser.add_argument( "--embed_prefix", type=str, default=None, help="""\ Pretrained embedding prefix, expect files with src/tgt suffixes. 
The embedding files should be Glove formatted txt files.\ """) parser.add_argument("--sos", type=str, default="<s>", help="Start-of-sentence symbol.") parser.add_argument("--eos", type=str, default="</s>", help="End-of-sentence symbol.") parser.add_argument( "--share_vocab", type="bool", nargs="?", const=True, default=True, help="""\ Whether to use the source vocab and embeddings for both source and target.\ """) parser.add_argument("--check_special_token", type="bool", default=True, help="""\ Whether to check that the special sos, eos, unk tokens exist in the vocab files.\ """) # Sequence lengths parser.add_argument( "--src_max_len", type=int, default=50, help="Max length of src sequences during training.") parser.add_argument( "--tgt_max_len", type=int, default=50, help="Max length of tgt sequences during training.") parser.add_argument("--src_max_len_infer", type=int, default=None, help="Max length of src sequences during inference.") parser.add_argument("--tgt_max_len_infer", type=int, default=80, help="""\ Max length of tgt sequences during inference. Also used to restrict the maximum decoding length.\ """) # Default settings work well (rarely need to change) parser.add_argument("--unit_type", type=str, default="lstm", help="lstm | gru | layer_norm_lstm | nas") parser.add_argument("--forget_bias", type=float, default=1.0, help="Forget bias for BasicLSTMCell.") parser.add_argument("--dropout", type=float, default=0.2, help="Dropout rate (not keep_prob)") parser.add_argument("--max_gradient_norm", type=float, default=5.0, help="Clip gradients to this norm.") parser.add_argument("--batch_size", type=int, default=128, help="Batch size.") parser.add_argument("--steps_per_stats", type=int, default=5, help=("How many training steps to do per stats logging. " "Save checkpoint every 10x steps_per_stats")) parser.add_argument("--max_train", type=int, default=0, help="Limit on the size of training data (0: no limit).") parser.add_argument( "--num_buckets", type=int, default=1, help="Put data into similar-length buckets.") # SPM parser.add_argument("--subword_option", type=str, default="bpe", choices=["", "bpe", "spm"], help="""\ Set to bpe or spm to activate subword desegmentation.\ """) # Experimental encoding feature. parser.add_argument("--use_char_encode", type="bool", default=False, help="""\ Whether to split each word or bpe token into characters, and then generate the word-level representation from the character representation. """) # Misc parser.add_argument( "--save_checkpoints_steps", type=int, default=1000, help="save_checkpoints_steps") parser.add_argument( "--num_gpus", type=int, default=1, help="Number of gpus in each worker.") parser.add_argument( "--log_device_placement", type="bool", nargs="?", const=True, default=True, help="Debug GPU allocation.") parser.add_argument("--steps_per_external_eval", type=int, default=None, help="""\ How many training steps to do per external evaluation.
Automatically set based on data if None.\ """) parser.add_argument("--hparams_path", type=str, default=None, help=("Path to standard hparams json file that overrides " "hparams values from FLAGS.")) parser.add_argument( "--random_seed", type=int, default=1, help="Random seed (>0, set a specific seed).") parser.add_argument("--override_loaded_hparams", type="bool", nargs="?", const=True, default=False, help="Override loaded hparams with values specified") parser.add_argument("--num_keep_ckpts", type=int, default=5, help="Max number of checkpoints to keep.") parser.add_argument("--avg_ckpts", type="bool", nargs="?", const=True, default=False, help=("""\ Average the last N checkpoints for external evaluation. N can be controlled by setting --num_keep_ckpts.\ """)) parser.add_argument("--language_model", type="bool", nargs="?", const=True, default=False, help="True to train a language model, ignoring encoder") # Inference parser.add_argument("--ckpt", type=str, default="", help="Checkpoint file to load a model for inference.") parser.add_argument("--inference_input_file", type=str, default=None, help="Set to the text to decode.") parser.add_argument("--inference_list", type=str, default=None, help=("A comma-separated list of sentence indices " "(0-based) to decode.")) parser.add_argument( "--infer_batch_size", type=int, default=64, help="Batch size for inference mode.") parser.add_argument("--detokenizer_file", type=str, default="", help=("""Detokenizer script file.""")) parser.add_argument("--use_borg", type="bool", default=False) # Advanced inference arguments parser.add_argument("--infer_mode", type=str, default="beam_search", choices=["greedy", "sample", "beam_search"], help="Which type of decoder to use during inference.") parser.add_argument("--beam_width", type=int, default=5, help=("""\ Beam width when using the beam search decoder. If 0, use the standard decoder with a greedy helper.\ """)) parser.add_argument( "--length_penalty_weight", type=float, default=0.6, help="Length penalty for beam search.") parser.add_argument( "--coverage_penalty_weight", type=float, default=0.1, help="Coverage penalty for beam search.") parser.add_argument("--sampling_temperature", type=float, default=0.0, help=("""\ Softmax sampling temperature for inference decoding, 0.0 means greedy decoding. This option is ignored when using beam search.\ """)) parser.add_argument("--num_translations_per_input", type=int, default=1, help=("""\ Number of translations generated for each sentence. This is only used for inference.\ """)) # Job info parser.add_argument("--jobid", type=int, default=0, help="Task id of the worker.") parser.add_argument("--num_workers", type=int, default=1, help="Number of workers (inference only).") parser.add_argument("--num_inter_threads", type=int, default=0, help="number of inter_op_parallelism_threads") parser.add_argument("--num_intra_threads", type=int, default=0, help="number of intra_op_parallelism_threads") # Fp16 parser.add_argument("--use_fp16", type="bool", default=True, help="use_fp16 for training and inference") parser.add_argument( "--fp16_loss_scale", type=float, default=128, help="If fp16 is enabled, the loss is multiplied by this amount " "right before gradients are computed, then each gradient " "is divided by this amount. Mathematically, this has no " "effect, but it helps avoid fp16 underflow.
Set to 1 to " "effectively disable.") parser.add_argument( "--enable_auto_loss_scale", type="bool", default=False, help="If True and use_fp16 is True, automatically adjust the " "loss scale during training.") parser.add_argument( "--fp16_inc_loss_scale_every_n", type=int, default=1000, help="If fp16 is enabled and enable_auto_loss_scale is " "True, increase the loss scale every n steps.") parser.add_argument( "--check_tower_loss_numerics", type="bool", default=False, # Set to false for xla.compile() help="whether to check tower loss numerics") parser.add_argument( "--use_fp32_batch_matmul", type="bool", default=True, help="Whether to use fp32 batch matmul") # Performance # XLA parser.add_argument( "--force_inputs_padding", type="bool", default=False, help="Force padding input batch to src_max_len and tgt_max_len") parser.add_argument( "--use_xla", type="bool", default=False, help="Use xla to compile a few selected locations, mostly Defuns.") parser.add_argument( "--use_xla_compile", type="bool", default=False, help="Use xla.compile() for each tower's fwd and bak pass.") parser.add_argument( "--use_autojit_xla", type="bool", default=False, help="Use auto jit xla.") # GPU knobs parser.add_argument( "--use_pintohost_optimizer", type="bool", default=False, help="whether to use PinToHost optimizer") parser.add_argument( "--use_cudnn_lstm", type="bool", default=False, help="whether to use cudnn_lstm for encoder, non residual layers") parser.add_argument( "--use_loose_bidi_cudnn_lstm", type="bool", default=False, help="whether to use loose bidi cudnn_lstm") parser.add_argument( "--use_fused_lstm", type="bool", default=False, help="whether to use fused lstm and variant. If enabled, training will " "use LSTMBlockFusedCell, infer will use LSTMBlockCell when appropriate.") parser.add_argument( "--use_fused_lstm_dec", type="bool", default=False, help="whether to use fused lstm for decoder (training only).") parser.add_argument( "--gpu_indices", type=str, default="", help="Indices of worker GPUs in ring order") parser.add_argument( "--gpu_thread_mode", type=str, default="global", help="Methods to assign GPU host work to threads. " "global: all GPUs and CPUs share the same global threads; " "gpu_private: a private threadpool for each GPU; " "gpu_shared: all GPUs share the same threadpool.") parser.add_argument( "--per_gpu_thread_count", type=int, default=0, help="The number of threads to use for GPU. Only valid when " "gpu_thread_mode is not global.") parser.add_argument( "--sync_on_finish", type="bool", default=False, help="Enable/disable whether the devices are synced after each " "step.") parser.add_argument( "--force_gpu_compatible", type="bool", default=False, help="whether to enable force_gpu_compatible in GPU_Options") # Graph knobs parser.add_argument("--parallel_iterations", type=int, default=10, help="number of parallel iterations in dynamic_rnn") parser.add_argument("--use_dist_strategy", type="bool", default=False, help="whether to use distribution strategy") parser.add_argument( "--hierarchical_copy", type="bool", default=False, help="Use hierarchical copies. Currently only optimized for " "use on a DGX-1 with 8 GPUs and may perform poorly on " "other hardware. 
Requires --num_gpus > 1, and only " "recommended when --num_gpus=8") parser.add_argument( "--network_topology", type=constants.NetworkTopology, default=constants.NetworkTopology.DGX1, choices=list(constants.NetworkTopology)) parser.add_argument( "--enable_layout_optimizer", type="bool", default=False, help="whether to enable layout optimizer") parser.add_argument( "--use_block_lstm", type="bool", default=False, help="whether to use block lstm") parser.add_argument( "--use_defun", type="bool", default=False, help="whether to use Defun") # Gradient tricks parser.add_argument( "--gradient_repacking", type=int, default=0, help="Use gradient repacking. It " "currently only works with replicated mode. At the end " "of each step, it repacks the gradients for more efficient " "cross-device transportation. A non-zero value specifies " "the number of split packs that will be formed.") parser.add_argument( "--compact_gradient_transfer", type="bool", default=True, help="Compact gradient as much as possible for cross-device transfer and " "aggregation.") parser.add_argument( "--all_reduce_spec", type=str, default="nccl", help="A specification of the all_reduce algorithm to be used " "for reducing gradients. For more details, see " "parse_all_reduce_spec in variable_mgr.py. An " "all_reduce_spec has BNF form:\n" "int ::= positive whole number\n" "g_int ::= int[KkMGT]?\n" "alg_spec ::= alg | alg#int\n" "range_spec ::= alg_spec | alg_spec/alg_spec\n" "spec ::= range_spec | range_spec:g_int:range_spec\n" "NOTE: not all syntactically correct constructs are " "supported.\n\n" "Examples:\n " "\"xring\" == use one global ring reduction for all " "tensors\n" "\"pscpu\" == use CPU at worker 0 to reduce all tensors\n" "\"nccl\" == use NCCL to locally reduce all tensors. " "Limited to 1 worker.\n" "\"nccl/xring\" == locally (to one worker) reduce values " "using NCCL then ring reduce across workers.\n" "\"pscpu:32k:xring\" == use pscpu algorithm for tensors of " "size up to 32kB, then xring for larger tensors.") parser.add_argument( "--agg_small_grads_max_bytes", type=int, default=0, help="If > 0, try to aggregate tensors of less than this " "number of bytes prior to all-reduce.") parser.add_argument( "--agg_small_grads_max_group", type=int, default=10, help="When aggregating small tensors for all-reduce do not " "aggregate more than this many into one new tensor.") parser.add_argument( "--allreduce_merge_scope", type=int, default=1, help="Establish a name scope around this many " "gradients prior to creating the all-reduce operations. " "It may affect the ability of the backend to merge " "parallel ops.") # Other knobs parser.add_argument( "--local_parameter_device", type=str, default="gpu", help="Device to use as parameter server: cpu or gpu. For " "distributed training, it can affect where caching of " "variables happens.") parser.add_argument( "--autotune_threshold", type=int, default=0, help="The autotune threshold for the models") parser.add_argument( "--datasets_num_private_threads", type=int, default=None, help="Number of threads for a private threadpool created for " "all datasets computation. By default, we pick an " "appropriate number.
If set to 0, we use the default " "tf-Compute threads for dataset operations.") parser.add_argument( "--winograd_nonfused", type="bool", default=True, help="Enable/disable using the Winograd non-fused algorithms.") parser.add_argument( "--batchnorm_persistent", type="bool", default=True, help="Enable/disable using the CUDNN_BATCHNORM_SPATIAL_PERSISTENT " "mode for batchnorm.") parser.add_argument( "--device", type=str, default="gpu", help="Device to use for computation: cpu or gpu") parser.add_argument( "--allow_growth", type="bool", default=False, help="whether to enable allow_growth in GPU_Options") parser.add_argument( "--use_resource_vars", type="bool", default=False, help="Use resource variables instead of normal variables. " "Resource variables are slower, but this option is useful " "for debugging their performance.") # Performance tuning specific to MKL. parser.add_argument( "--mkl", type="bool", default=False, help="If true, set MKL environment variables.") parser.add_argument( "--kmp_blocktime", type=int, default=30, help="The time, in milliseconds, that a thread should wait, " "after completing the execution of a parallel region, " "before sleeping") parser.add_argument( "--kmp_affinity", type=str, default="granularity=fine,verbose,compact,1,0", help="Restricts execution of certain threads (virtual execution " "units) to a subset of the physical processing units in a " "multiprocessor computer.") parser.add_argument( "--kmp_settings", type=int, default=1, help="If set to 1, MKL settings will be printed.") # Debug parser.add_argument("--debug", type="bool", default=False, help="Debug train and eval") parser.add_argument("--show_metrics", type="bool", default=True, help="whether to show detailed metrics") parser.add_argument("--build_graph_only", type="bool", default=False, help="whether or not to just build the graph") parser.add_argument("--clip_grads", type="bool", default=True, help="whether to clip gradients") parser.add_argument("--profile", type="bool", default=False, help="Whether to generate a profile") parser.add_argument("--profile_save_steps", type=int, default=10, help="Save timeline every N steps.") # TPU parser.add_argument("--use_dynamic_rnn", type="bool", default=True) parser.add_argument("--master", type=str, default="") parser.add_argument("--use_synthetic_data", type="bool", default=False) parser.add_argument( "--iterations_per_loop", type=int, default=100, help="the number of iterations to run on TPU before returning to host") parser.add_argument( "--mode", type=str, default="train_and_eval", choices=["train", "train_and_eval", "infer"]) parser.add_argument( "--run_name", type=str, default="", help= "if set, load ckpt from /gs://ij-d/home/mlperf-nmt/'run_name'" ) def create_hparams(flags): """Create training hparams.""" return tf.contrib.training.HParams( # Data src=flags.src, tgt=flags.tgt, train_prefix=os.path.join(flags.data_dir, flags.train_prefix), dev_prefix=os.path.join(flags.data_dir, flags.dev_prefix), test_prefix=os.path.join(flags.data_dir, flags.test_prefix), vocab_prefix=os.path.join(flags.data_dir, flags.vocab_prefix), embed_prefix=flags.embed_prefix, output_dir=flags.output_dir, # Networks num_units=flags.num_units, num_encoder_layers=(flags.num_encoder_layers or flags.num_layers), num_decoder_layers=(flags.num_decoder_layers or flags.num_layers), dropout=flags.dropout, unit_type=flags.unit_type, encoder_type=flags.encoder_type, residual=flags.residual, time_major=flags.time_major, num_embeddings_partitions=flags.num_embeddings_partitions, # Attention
mechanisms attention=flags.attention, attention_architecture=flags.attention_architecture, output_attention=flags.output_attention, pass_hidden_state=flags.pass_hidden_state, # Train optimizer=flags.optimizer, num_train_steps=flags.num_train_steps, max_train_epochs=flags.max_train_epochs, num_examples_per_epoch=flags.num_examples_per_epoch, target_bleu=flags.target_bleu, label_smoothing=flags.label_smoothing, batch_size=flags.batch_size, init_op=flags.init_op, init_weight=flags.init_weight, max_gradient_norm=flags.max_gradient_norm, learning_rate=flags.learning_rate, warmup_steps=flags.warmup_steps, warmup_scheme=flags.warmup_scheme, decay_scheme=flags.decay_scheme, colocate_gradients_with_ops=flags.colocate_gradients_with_ops, # Data constraints num_buckets=flags.num_buckets, max_train=flags.max_train, src_max_len=flags.src_max_len, tgt_max_len=flags.tgt_max_len, # Inference src_max_len_infer=flags.src_max_len_infer, tgt_max_len_infer=flags.tgt_max_len_infer, infer_batch_size=flags.infer_batch_size, detokenizer_file=flags.detokenizer_file, use_borg=flags.use_borg, # Advanced inference arguments infer_mode=flags.infer_mode, beam_width=flags.beam_width, length_penalty_weight=flags.length_penalty_weight, coverage_penalty_weight=flags.coverage_penalty_weight, sampling_temperature=flags.sampling_temperature, num_translations_per_input=flags.num_translations_per_input, # Vocab sos=flags.sos if flags.sos else vocab_utils.SOS, eos=flags.eos if flags.eos else vocab_utils.EOS, subword_option=flags.subword_option, check_special_token=flags.check_special_token, use_char_encode=flags.use_char_encode, # Misc forget_bias=flags.forget_bias, num_gpus=flags.num_gpus, save_checkpoints_steps=flags.save_checkpoints_steps, epoch_step=0, # record where we were within an epoch. 
steps_per_stats=flags.steps_per_stats, steps_per_external_eval=flags.steps_per_external_eval, share_vocab=flags.share_vocab, log_device_placement=flags.log_device_placement, random_seed=flags.random_seed, override_loaded_hparams=flags.override_loaded_hparams, num_keep_ckpts=flags.num_keep_ckpts, avg_ckpts=flags.avg_ckpts, language_model=flags.language_model, num_intra_threads=flags.num_intra_threads, num_inter_threads=flags.num_inter_threads, # Fp16 use_fp16=flags.use_fp16, fp16_loss_scale=flags.fp16_loss_scale, enable_auto_loss_scale=flags.enable_auto_loss_scale, fp16_inc_loss_scale_every_n=flags.fp16_inc_loss_scale_every_n, check_tower_loss_numerics=flags.check_tower_loss_numerics, use_fp32_batch_matmul=flags.use_fp32_batch_matmul, # Performance # GPU knbs force_inputs_padding=flags.force_inputs_padding, use_xla=flags.use_xla, use_xla_compile=flags.use_xla_compile, use_autojit_xla=flags.use_autojit_xla, use_pintohost_optimizer=flags.use_pintohost_optimizer, use_cudnn_lstm=flags.use_cudnn_lstm, use_loose_bidi_cudnn_lstm=flags.use_loose_bidi_cudnn_lstm, use_fused_lstm=flags.use_fused_lstm, use_fused_lstm_dec=flags.use_fused_lstm_dec, gpu_indices=flags.gpu_indices, gpu_thread_mode=flags.gpu_thread_mode, per_gpu_thread_count=flags.per_gpu_thread_count, sync_on_finish=flags.sync_on_finish, force_gpu_compatible=flags.force_gpu_compatible, # Graph knobs parallel_iterations=flags.parallel_iterations, use_dynamic_rnn=flags.use_dynamic_rnn, use_dist_strategy=flags.use_dist_strategy, hierarchical_copy=flags.hierarchical_copy, network_topology=flags.network_topology, enable_layout_optimizer=flags.enable_layout_optimizer, use_block_lstm=flags.use_block_lstm, # Grad tricks gradient_repacking=flags.gradient_repacking, compact_gradient_transfer=flags.compact_gradient_transfer, all_reduce_spec=flags.all_reduce_spec, agg_small_grads_max_bytes=flags.agg_small_grads_max_bytes, agg_small_grads_max_group=flags.agg_small_grads_max_group, allreduce_merge_scope=flags.allreduce_merge_scope, # Other knobs local_parameter_device=("cpu" if flags.num_gpus ==0 else flags.local_parameter_device), autotune_threshold=flags.autotune_threshold, datasets_num_private_threads=flags.datasets_num_private_threads, winograd_nonfused=flags.winograd_nonfused, batchnorm_persistent=flags.batchnorm_persistent, device=flags.device, allow_growth=flags.allow_growth, use_resource_vars=flags.use_resource_vars, mkl=flags.mkl, kmp_blocktime=flags.kmp_blocktime, kmp_affinity=flags.kmp_affinity, kmp_settings=flags.kmp_settings, # Debug debug=flags.debug, build_graph_only=flags.build_graph_only, clip_grads=flags.clip_grads, profile=flags.profile, profile_save_steps=flags.profile_save_steps, show_metrics=flags.show_metrics, # TPU master=flags.master, use_synthetic_data=flags.use_synthetic_data, iterations_per_loop=flags.iterations_per_loop, mode=flags.mode, run_name=flags.run_name) def _add_argument(hparams, key, value, update=True): """Add an argument to hparams; if exists, change the value if update==True.""" if hasattr(hparams, key): if update: setattr(hparams, key, value) else: hparams.add_hparam(key, value) def extend_hparams(hparams): """Add new arguments to hparams.""" # Sanity checks if hparams.encoder_type == "bi" and hparams.num_encoder_layers % 2 != 0: raise ValueError("For bi, num_encoder_layers %d should be even" % hparams.num_encoder_layers) if (hparams.attention_architecture in ["gnmt"] and hparams.num_encoder_layers < 2): raise ValueError("For gnmt attention architecture, " "num_encoder_layers %d should be >= 2" % 
hparams.num_encoder_layers) if hparams.subword_option and hparams.subword_option not in ["spm", "bpe"]: raise ValueError("subword option must be either spm or bpe") if hparams.infer_mode == "beam_search" and hparams.beam_width <= 0: raise ValueError("beam_width must be greater than 0 when using the " "beam_search decoder.") if hparams.infer_mode == "sample" and hparams.sampling_temperature <= 0.0: raise ValueError("sampling_temperature must be greater than 0.0 when " "using the sample decoder.") # Different number of encoder / decoder layers assert hparams.num_encoder_layers and hparams.num_decoder_layers if hparams.num_encoder_layers != hparams.num_decoder_layers: hparams.pass_hidden_state = False utils.print_out("Num encoder layer %d is different from num decoder layer" " %d, so set pass_hidden_state to False" % ( hparams.num_encoder_layers, hparams.num_decoder_layers)) # Set residual layers num_encoder_residual_layers = 0 num_decoder_residual_layers = 0 if hparams.residual: if hparams.num_encoder_layers > 1: num_encoder_residual_layers = hparams.num_encoder_layers - 1 if hparams.num_decoder_layers > 1: num_decoder_residual_layers = hparams.num_decoder_layers - 1 if hparams.encoder_type == "gnmt": # The first unidirectional layer (after the bi-directional layer) in # the GNMT encoder can't have a residual connection because its input is # the concatenation of fw_cell's and bw_cell's outputs. num_encoder_residual_layers = hparams.num_encoder_layers - 2 # Compatible for GNMT models if hparams.num_encoder_layers == hparams.num_decoder_layers: num_decoder_residual_layers = num_encoder_residual_layers _add_argument(hparams, "num_encoder_residual_layers", num_encoder_residual_layers) _add_argument(hparams, "num_decoder_residual_layers", num_decoder_residual_layers) # Language modeling if hparams.language_model: hparams.attention = "" hparams.attention_architecture = "" hparams.pass_hidden_state = False hparams.share_vocab = True hparams.src = hparams.tgt utils.print_out("For language modeling, we turn off attention and " "pass_hidden_state; turn on share_vocab; set src to tgt.") ## Vocab # Get vocab file names first if hparams.vocab_prefix: src_vocab_file = hparams.vocab_prefix + "." + hparams.src tgt_vocab_file = hparams.vocab_prefix + "."
+ hparams.tgt else: raise ValueError("hparams.vocab_prefix must be provided.") # Source vocab src_vocab_size, src_vocab_file = vocab_utils.check_vocab( src_vocab_file, hparams.output_dir, check_special_token=hparams.check_special_token, sos=hparams.sos, eos=hparams.eos, unk=vocab_utils.UNK) # Target vocab if hparams.share_vocab: utils.print_out(" using source vocab for target") tgt_vocab_file = src_vocab_file tgt_vocab_size = src_vocab_size else: tgt_vocab_size, tgt_vocab_file = vocab_utils.check_vocab( tgt_vocab_file, hparams.output_dir, check_special_token=hparams.check_special_token, sos=hparams.sos, eos=hparams.eos, unk=vocab_utils.UNK) mlperf_log.gnmt_print(key=mlperf_log.PREPROC_VOCAB_SIZE, value={"src": src_vocab_size, "tgt": tgt_vocab_size}) _add_argument(hparams, "src_vocab_size", src_vocab_size) _add_argument(hparams, "tgt_vocab_size", tgt_vocab_size) _add_argument(hparams, "src_vocab_file", src_vocab_file) _add_argument(hparams, "tgt_vocab_file", tgt_vocab_file) # Num embedding partitions _add_argument( hparams, "num_enc_emb_partitions", hparams.num_embeddings_partitions) _add_argument( hparams, "num_dec_emb_partitions", hparams.num_embeddings_partitions) # Pretrained Embeddings _add_argument(hparams, "src_embed_file", "") _add_argument(hparams, "tgt_embed_file", "") if hparams.embed_prefix: src_embed_file = hparams.embed_prefix + "." + hparams.src tgt_embed_file = hparams.embed_prefix + "." + hparams.tgt if tf.gfile.Exists(src_embed_file): utils.print_out(" src_embed_file %s exist" % src_embed_file) hparams.src_embed_file = src_embed_file utils.print_out( "For pretrained embeddings, set num_enc_emb_partitions to 1") hparams.num_enc_emb_partitions = 1 else: utils.print_out(" src_embed_file %s doesn't exist" % src_embed_file) if tf.gfile.Exists(tgt_embed_file): utils.print_out(" tgt_embed_file %s exist" % tgt_embed_file) hparams.tgt_embed_file = tgt_embed_file utils.print_out( "For pretrained embeddings, set num_dec_emb_partitions to 1") hparams.num_dec_emb_partitions = 1 else: utils.print_out(" tgt_embed_file %s doesn't exist" % tgt_embed_file) # Evaluation metric = "bleu" best_metric_dir = os.path.join(hparams.output_dir, "best_" + metric) tf.gfile.MakeDirs(best_metric_dir) _add_argument(hparams, "best_" + metric, 0, update=False) _add_argument(hparams, "best_" + metric + "_dir", best_metric_dir) if hparams.avg_ckpts: best_metric_dir = os.path.join(hparams.output_dir, "avg_best_" + metric) tf.gfile.MakeDirs(best_metric_dir) _add_argument(hparams, "avg_best_" + metric, 0, update=False) _add_argument(hparams, "avg_best_" + metric + "_dir", best_metric_dir) return hparams def create_or_load_hparams(default_hparams, hparams_path): """Create hparams or load hparams from output_dir.""" hparams = utils.maybe_parse_standard_hparams(default_hparams, hparams_path) hparams = extend_hparams(hparams) # Print HParams utils.print_hparams(hparams) return hparams def run_main(flags, default_hparams, estimator_fn): """Run main.""" # Job jobid = flags.jobid utils.print_out("# Job id %d" % jobid) # Random random_seed = flags.random_seed if random_seed is not None and random_seed > 0: utils.print_out("# Set random seed to %d" % random_seed) random.seed(random_seed + jobid) np.random.seed(random_seed + jobid) tf.set_random_seed(random_seed) # Model output directory output_dir = flags.output_dir if output_dir and not tf.gfile.Exists(output_dir): utils.print_out("# Creating output directory %s ..." % output_dir) tf.gfile.MakeDirs(output_dir) # Load hparams. 
hparams = create_or_load_hparams(default_hparams, flags.hparams_path) # Train or Evaluation estimator_fn(hparams) return hparams def main(unused_argv): tf.logging.set_verbosity(tf.logging.INFO) if FLAGS.use_fp16 and FLAGS.use_dist_strategy: raise ValueError("use_fp16 and use_dist_strategy aren't compatible") # Set up hacky envvars. # Hack that affects Defun in attention_wrapper.py active_xla_option_nums = np.sum([FLAGS.use_xla, FLAGS.use_autojit_xla, FLAGS.use_xla_compile]) if active_xla_option_nums > 1: raise ValueError( "Only one of use_xla, use_xla_compile, use_autojit_xla can be set") os.environ["use_xla"] = str(FLAGS.use_xla).lower() if FLAGS.use_xla: os.environ["use_defun"] = str(True).lower() else: os.environ["use_defun"] = str(FLAGS.use_defun).lower() utils.print_out("use_defun is %s for attention" % os.environ["use_defun"]) # TODO(jamesqin): retire this config after Cuda9.1 os.environ["use_fp32_batch_matmul"] = ("true" if FLAGS.use_fp32_batch_matmul else "false") os.environ["use_xla_compile"] = "true" if FLAGS.use_xla_compile else "false" os.environ["force_inputs_padding"] = ( "true" if FLAGS.force_inputs_padding else "false") if FLAGS.mode == "train": utils.print_out("Running training mode.") FLAGS.num_buckets = 5 default_hparams = create_hparams(FLAGS) run_main(FLAGS, default_hparams, estimator.train_fn) elif FLAGS.mode == "infer": utils.print_out("Running inference mode.") # Random random_seed = FLAGS.random_seed if random_seed is not None and random_seed > 0: utils.print_out("# Set random seed to %d" % random_seed) random.seed(random_seed) np.random.seed(random_seed) tf.set_random_seed(random_seed) # Model output directory output_dir = FLAGS.output_dir if output_dir and not tf.gfile.Exists(output_dir): utils.print_out("# Creating output directory %s ..." % output_dir) tf.gfile.MakeDirs(output_dir) # Load hparams. default_hparams = create_hparams(FLAGS) default_hparams.num_buckets = 1 # The estimator model_fn is written in a way allowing train hparams to be # passed in infer mode. hparams = create_or_load_hparams(default_hparams, FLAGS.hparams_path) utils.print_out("infer_hparams:") utils.print_hparams(hparams) # Run evaluation when there's a new checkpoint for i, ckpt in enumerate( evaluation_utils.get_all_checkpoints(FLAGS.output_dir)): tf.logging.info("Starting to evaluate...") eval_start = time.time() bleu_score = estimator.eval_fn(hparams, ckpt) eval_end = time.time() utils.print_out("eval time for %d th ckpt: %.2f mins" % (i, (eval_end - eval_start) / 60.), f=sys.stderr) else: assert FLAGS.mode == "train_and_eval" utils.print_out("Running train and eval mode.") # Random random_seed = FLAGS.random_seed if random_seed is not None and random_seed > 0: utils.print_out("# Set random seed to %d" % random_seed) random.seed(random_seed) np.random.seed(random_seed) tf.set_random_seed(random_seed) # Model output directory output_dir = FLAGS.output_dir if output_dir and not tf.gfile.Exists(output_dir): utils.print_out("# Creating output directory %s ..." % output_dir) tf.gfile.MakeDirs(output_dir) # Load hparams. default_hparams = create_hparams(FLAGS) default_hparams.num_buckets = 5 hparams = create_or_load_hparams(default_hparams, FLAGS.hparams_path) utils.print_out("training hparams:") utils.print_hparams(hparams) with tf.gfile.GFile(os.path.join(output_dir, "train_hparams.txt"), "w") as f: f.write(utils.serialize_hparams(hparams) + "\n") # The estimator model_fn is written in a way allowing train hparams to be # passed in infer mode. 
infer_hparams = tf.contrib.training.HParams(**hparams.values()) infer_hparams.num_buckets = 1 utils.print_out("infer_hparams:") utils.print_hparams(infer_hparams) with tf.gfile.GFile(os.path.join(output_dir, "infer_hparams.txt"), "w") as f: f.write(utils.serialize_hparams(infer_hparams) + "\n") epochs = 0 should_stop = epochs >= FLAGS.max_train_epochs mlperf_log.gnmt_print(key=mlperf_log.TRAIN_LOOP) mlperf_log.gnmt_print(key=mlperf_log.EVAL_TARGET, value=hparams.target_bleu) while not should_stop: utils.print_out("Starting epoch %d" % epochs) mlperf_log.gnmt_print(key=mlperf_log.TRAIN_EPOCH, value=epochs) mlperf_log.gnmt_print( key=mlperf_log.INPUT_SIZE, value=iterator_utils.get_effective_train_epoch_size(hparams)) mlperf_log.gnmt_print( key=mlperf_log.TRAIN_CHECKPOINT, value=("Under " + hparams.output_dir)) try: train_start = time.time() estimator.train_fn(hparams) except tf.errors.OutOfRangeError: utils.print_out("training hits OutOfRangeError", f=sys.stderr) train_end = time.time() utils.print_out("training time for epoch %d: %.2f mins" % (epochs, (train_end - train_start) / 60.), f=sys.stderr) # This is probably sub-optimal, doing eval per-epoch mlperf_log.gnmt_print(key=mlperf_log.EVAL_START) eval_start = time.time() bleu_score = estimator.eval_fn(infer_hparams) eval_end = time.time() utils.print_out("eval time for epoch %d: %.2f mins" % (epochs, (eval_end - eval_start) / 60.), f=sys.stderr) mlperf_log.gnmt_print(key=mlperf_log.EVAL_ACCURACY, value={"epoch": epochs, "value": bleu_score}) mlperf_log.gnmt_print(key=mlperf_log.EVAL_STOP, value=epochs) if FLAGS.debug or bleu_score > FLAGS.target_bleu: should_stop = True utils.print_out( "Stop job since target bleu is reached at epoch %d ." % epochs, f=sys.stderr) mlperf_log.gnmt_print(mlperf_log.RUN_STOP, {"success": True}) if epochs >= FLAGS.max_train_epochs: should_stop = True utils.print_out("Stop job since max_train_epochs is reached.", f=sys.stderr) mlperf_log.gnmt_print(mlperf_log.RUN_STOP, {"success": False}) epochs += 1 mlperf_log.gnmt_print(key=mlperf_log.RUN_FINAL) if __name__ == "__main__": nmt_parser = argparse.ArgumentParser() add_arguments(nmt_parser) FLAGS, unparsed = nmt_parser.parse_known_args() mlperf_log.gnmt_print(key=mlperf_log.RUN_START) tf.app.run(main=main, argv=[sys.argv[0]] + unparsed)
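# ---------------------------------------------------------------------------
# Hedged usage sketch (not part of the original file): wires the pieces above
# together without launching a training job. The --data_dir and --output_dir
# values are placeholders; extend_hparams() will only succeed once the vocab
# and data files actually exist under data_dir.
def _example_build_hparams():
  example_parser = argparse.ArgumentParser()
  add_arguments(example_parser)
  flags, _ = example_parser.parse_known_args([
      "--data_dir=/tmp/wmt16_de_en",  # hypothetical dataset location
      "--output_dir=/tmp/nmt_model",  # hypothetical model directory
      "--mode=train_and_eval",
  ])
  # create_hparams() snapshots the parsed flags; extend_hparams() validates
  # them and derives fields such as vocab sizes and residual layer counts.
  hparams = extend_hparams(create_hparams(flags))
  utils.print_hparams(hparams)
  return hparams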
mlperf/training_results_v0.5
v0.5.0/google/cloud_v3.8/resnet-tpuv3-8/code/resnet/model/staging/models/rough/nmt_gpu/nmt.py
Python
apache-2.0
46,513
0.007095
# -*- coding: utf-8 -*- # Copyright(C) 2010 Romain Bignon # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, version 3 of the License. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. # python2.5 compatibility from __future__ import with_statement import os import sys import tempfile from weboob.core.bcall import CallErrors from weboob.capabilities.content import ICapContent from weboob.tools.application.repl import ReplApplication __all__ = ['WebContentEdit'] class WebContentEdit(ReplApplication): APPNAME = 'webcontentedit' VERSION = '0.4' COPYRIGHT = 'Copyright(C) 2010 Romain Bignon' CAPS = ICapContent def do_edit(self, line): """ edit ID Edit a content with $EDITOR, then push it on the website. """ contents = [] for id in line.split(): _id, backend_name = self.parse_id(id) backend_names = (backend_name,) if backend_name is not None else self.enabled_backends contents += [content for backend, content in self.do('get_content', _id, backends=backend_names) if content] if len(contents) == 0: print >>sys.stderr, 'No contents found' return 1 paths = {} for content in contents: tmpdir = os.path.join(tempfile.gettempdir(), "weboob") if not os.path.isdir(tmpdir): os.makedirs(tmpdir) fd, path = tempfile.mkstemp(prefix='%s_' % content.id.replace(os.path.sep, '_'), dir=tmpdir) with os.fdopen(fd, 'w') as f: data = content.content if isinstance(data, unicode): data = data.encode('utf-8') f.write(data) paths[path] = content params = '' if os.environ['EDITOR'] == 'vim': params = '-p' os.system("$EDITOR %s %s" % (params, ' '.join(paths.iterkeys()))) for path, content in paths.iteritems(): with open(path, 'r') as f: data = f.read() try: data = data.decode('utf-8') except UnicodeError: pass if content.content != data: content.content = data else: contents.remove(content) if len(contents) == 0: print 'No changes. Abort.' return print 'Contents changed:\n%s' % ('\n'.join([' * %s' % content.id for content in contents])) message = self.ask('Enter a commit message', default='') if not self.ask('Do you want to push?', default=True): return errors = CallErrors([]) for content in contents: path = [path for path, c in paths.iteritems() if c == content][0] sys.stdout.write('Pushing %s...' % content.id) sys.stdout.flush() try: self.do('push_content', content, message, backends=[content.backend]).wait() except CallErrors, e: errors.errors += e.errors sys.stdout.write(' error (content saved in %s)\n' % path) else: sys.stdout.write(' done\n') os.unlink(path) if len(errors.errors) > 0: raise errors
jocelynj/weboob
weboob/applications/webcontentedit/webcontentedit.py
Python
gpl-3.0
3,817
0.00131
# -*- coding: utf-8 -*- from south.utils import datetime_utils as datetime from south.db import db from south.v2 import SchemaMigration from django.db import models class Migration(SchemaMigration): def forwards(self, orm): pass def backwards(self, orm): pass models = { } complete_apps = ['quest']
thejeshgn/quest
quest/migrations/0001_initial.py
Python
gpl-3.0
349
0.005731
import os from django.conf.urls import url, include urlpatterns = [ url(r'^misc/', include('misc.urls')), url(r'^qualification/', include('qualification.urls')), ] if os.environ.get('DJANGO_SETTINGS_MODULE') == 'etrack.settings.dev': import debug_toolbar urlpatterns += [ url(r'^__debug__/', include(debug_toolbar.urls)), ]
jmoreman/eTrack
etrack/urls.py
Python
mit
355
0
# run with https://codepen.io/sjdv1982/pen/MzNvJv # copy-paste seamless-client.js from seamless.highlevel import Context ctx = Context() ctx.cell1 = "test!" ctx.cell1.share() ctx.translate() ctx.compute() from seamless import shareserver print(shareserver.namespaces["ctx"].shares) print(shareserver.namespaces["ctx"].shares["cell1"].bound) print(shareserver.namespaces["ctx"].shares["cell1"].bound.cell) ctx.cell1.celltype = "plain" ctx.translate(force=True) ctx.compute() print(shareserver.namespaces["ctx"].shares) print(shareserver.namespaces["ctx"].shares["cell1"].bound) print(shareserver.namespaces["ctx"].shares["cell1"].bound.cell)
sjdv1982/seamless
tests/lowlevel/codepen-seamless-client.py
Python
mit
643
0.001555
''' forms, mostly used for simple tastypie validation ''' from django.contrib.gis import forms class MeetingForm(forms.Form): ''' form for meetings ''' day_of_week = forms.IntegerField(min_value=1, max_value=7) start_time = forms.TimeField() end_time = forms.TimeField() name = forms.CharField(max_length=100) description = forms.CharField(max_length=255, required=False) address = forms.CharField(max_length=300)
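# Hedged usage sketch (not part of the original module): how tastypie-side
# validation code might drive MeetingForm; the sample payload is made up.
def _example_meeting_validation():
    form = MeetingForm({
        "day_of_week": 2,  # must fall in the 1-7 range enforced above
        "start_time": "18:00",
        "end_time": "19:30",
        "name": "Tuesday meeting",
        "address": "123 Main St",
    })
    # is_valid() runs every field validator; form.errors maps field names
    # to their validation messages when something fails.
    return form.is_valid(), form.errors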
mcjug2015/mfserver2
django_app/forms.py
Python
gpl-3.0
444
0
# -*- coding: utf-8 -*- # Copyright (c) 2015, imageio contributors # imageio is distributed under the terms of the (new) BSD License. """ .. note:: imageio is under construction, some details with regard to the Reader and Writer classes may change. These are the main classes of imageio. They expose an interface for advanced users and plugin developers. A brief overview: * imageio.FormatManager - for keeping track of registered formats. * imageio.Format - representation of a file format reader/writer * imageio.Format.Reader - object used during the reading of a file. * imageio.Format.Writer - object used during saving a file. * imageio.Request - used to store the filename and other info. Plugins need to implement a Format class and register a format object using ``imageio.formats.add_format()``. """ from __future__ import absolute_import, print_function, division # todo: do we even use the known extensions? # Some notes: # # The classes in this module use the Request object to pass filename and # related info around. This request object is instantiated in # imageio.get_reader and imageio.get_writer. from __future__ import with_statement import os import numpy as np from . import Image, asarray from . import string_types, text_type, binary_type # noqa class Format: """ Represents an implementation to read/write a particular file format A format instance is responsible for 1) providing information about a format; 2) determining whether a certain file can be read/written with this format; 3) providing a reader/writer class. Generally, imageio will select the right format and use that to read/write an image. A format can also be explicitly chosen in all read/write functions. Use ``print(format)``, or ``help(format_name)`` to see its documentation. To implement a specific format, one should create a subclass of Format and the Format.Reader and Format.Writer classes. see :doc:`plugins` for details. Parameters ---------- name : str A short name of this format. Users can select a format using its name. description : str A one-line description of the format. extensions : str | list | None List of filename extensions that this format supports. If a string is passed it should be space or comma separated. The extensions are used in the documentation and to allow users to select a format by file extension. It is not used to determine what format to use for reading/saving a file. modes : str A string containing the modes that this format can handle ('iIvV'). This attribute is used in the documentation and to select the formats when reading/saving a file. """ def __init__(self, name, description, extensions=None, modes=None): # Store name and description self._name = name.upper() self._description = description # Store extensions, do some effort to normalize them. # They are stored as a list of lowercase strings without leading dots. 
if extensions is None: extensions = [] elif isinstance(extensions, string_types): extensions = extensions.replace(',', ' ').split(' ') # if isinstance(extensions, (tuple, list)): self._extensions = [e.strip('.').lower() for e in extensions if e] else: raise ValueError('Invalid value for extensions given.') # Store mode self._modes = modes or '' if not isinstance(self._modes, string_types): raise ValueError('Invalid value for modes given.') for m in self._modes: if m not in 'iIvV?': raise ValueError('Invalid value for mode given.') def __repr__(self): # Short description return '<Format %s - %s>' % (self.name, self.description) def __str__(self): return self.doc @property def doc(self): """ The documentation for this format (name + description + docstring). """ # Our docstring is assumed to be indented by four spaces. The # first line needs special attention. return '%s - %s\n\n %s\n' % (self.name, self.description, self.__doc__.strip()) @property def name(self): """ The name of this format. """ return self._name @property def description(self): """ A short description of this format. """ return self._description @property def extensions(self): """ A list of file extensions supported by this plugin. These are all lowercase without a leading dot. """ return self._extensions @property def modes(self): """ A string specifying the modes that this format can handle. """ return self._modes def get_reader(self, request): """ get_reader(request) Return a reader object that can be used to read data and info from the given file. Users are encouraged to use imageio.get_reader() instead. """ select_mode = request.mode[1] if request.mode[1] in 'iIvV' else '' if select_mode not in self.modes: raise RuntimeError('Format %s cannot read in mode %r' % (self.name, select_mode)) return self.Reader(self, request) def get_writer(self, request): """ get_writer(request) Return a writer object that can be used to write data and info to the given file. Users are encouraged to use imageio.get_writer() instead. """ select_mode = request.mode[1] if request.mode[1] in 'iIvV' else '' if select_mode not in self.modes: raise RuntimeError('Format %s cannot write in mode %r' % (self.name, select_mode)) return self.Writer(self, request) def can_read(self, request): """ can_read(request) Get whether this format can read data from the specified uri. """ return self._can_read(request) def can_write(self, request): """ can_write(request) Get whether this format can write data to the specified uri. """ return self._can_write(request) def _can_read(self, request): # pragma: no cover return None # Plugins must implement this def _can_write(self, request): # pragma: no cover return None # Plugins must implement this # ----- class _BaseReaderWriter(object): """ Base class for the Reader and Writer classes to implement common functionality. It implements a similar approach for opening/closing and context management as Python's file objects. """ def __init__(self, format, request): self.__closed = False self._BaseReaderWriter_last_index = -1 self._format = format self._request = request # Open the reader/writer self._open(**self.request.kwargs.copy()) @property def format(self): """ The :class:`.Format` object corresponding to the current read/write operation. """ return self._format @property def request(self): """ The :class:`.Request` object corresponding to the current read/write operation.
""" return self._request def __enter__(self): self._checkClosed() return self def __exit__(self, type, value, traceback): if value is None: # Otherwise error in close hide the real error. self.close() def __del__(self): try: self.close() except Exception: # pragma: no cover pass # Supress noise when called during interpreter shutdown def close(self): """ Flush and close the reader/writer. This method has no effect if it is already closed. """ if self.__closed: return self.__closed = True self._close() # Process results and clean request object self.request.finish() @property def closed(self): """ Whether the reader/writer is closed. """ return self.__closed def _checkClosed(self, msg=None): """Internal: raise an ValueError if reader/writer is closed """ if self.closed: what = self.__class__.__name__ msg = msg or ("I/O operation on closed %s." % what) raise RuntimeError(msg) # To implement def _open(self, **kwargs): """ _open(**kwargs) Plugins should probably implement this. It is called when reader/writer is created. Here the plugin can do its initialization. The given keyword arguments are those that were given by the user at imageio.read() or imageio.write(). """ raise NotImplementedError() def _close(self): """ _close() Plugins should probably implement this. It is called when the reader/writer is closed. Here the plugin can do a cleanup, flush, etc. """ raise NotImplementedError() # ----- class Reader(_BaseReaderWriter): """ The purpose of a reader object is to read data from an image resource, and should be obtained by calling :func:`.get_reader`. A reader can be used as an iterator to read multiple images, and (if the format permits) only reads data from the file when new data is requested (i.e. streaming). A reader can also be used as a context manager so that it is automatically closed. Plugins implement Reader's for different formats. Though rare, plugins may provide additional functionality (beyond what is provided by the base reader class). """ def get_length(self): """ get_length() Get the number of images in the file. (Note: you can also use ``len(reader_object)``.) The result can be: * 0 for files that only have meta data * 1 for singleton images (e.g. in PNG, JPEG, etc.) * N for image series * inf for streams (series of unknown length) """ return self._get_length() def get_data(self, index, **kwargs): """ get_data(index, **kwargs) Read image data from the file, using the image index. The returned image has a 'meta' attribute with the meta data. Some formats may support additional keyword arguments. These are listed in the documentation of those formats. """ self._checkClosed() self._BaseReaderWriter_last_index = index im, meta = self._get_data(index, **kwargs) return Image(im, meta) # Image tests im and meta def get_next_data(self, **kwargs): """ get_next_data(**kwargs) Read the next image from the series. Some formats may support additional keyword arguments. These are listed in the documentation of those formats. """ return self.get_data(self._BaseReaderWriter_last_index+1, **kwargs) def get_meta_data(self, index=None): """ get_meta_data(index=None) Read meta data from the file. using the image index. If the index is omitted or None, return the file's (global) meta data. Note that ``get_data`` also provides the meta data for the returned image as an atrribute of that image. The meta data is a dict, which shape depends on the format. E.g. for JPEG, the dict maps group names to subdicts and each group is a dict with name-value pairs. 
The groups represent the different metadata formats (EXIF, XMP, etc.). """ self._checkClosed() meta = self._get_meta_data(index) if not isinstance(meta, dict): raise ValueError('Meta data must be a dict, not %r' % meta.__class__.__name__) return meta def iter_data(self): """ iter_data() Iterate over all images in the series. (Note: you can also iterate over the reader object.) """ self._checkClosed() i, n = 0, self.get_length() while i < n: try: im, meta = self._get_data(i) except IndexError: if n == float('inf'): return raise yield Image(im, meta) i += 1 # Compatibility def __iter__(self): return self.iter_data() def __len__(self): return self.get_length() # To implement def _get_length(self): """ _get_length() Plugins must implement this. The returned scalar specifies the number of images in the series. See Reader.get_length for more information. """ raise NotImplementedError() def _get_data(self, index): """ _get_data() Plugins must implement this, but may raise an IndexError in case the plugin does not support random access. It should return the image and meta data: (ndarray, dict). """ raise NotImplementedError() def _get_meta_data(self, index): """ _get_meta_data(index) Plugins must implement this. It should return the meta data as a dict, corresponding to the given index, or to the file's (global) meta data if index is None. """ raise NotImplementedError() # ----- class Writer(_BaseReaderWriter): """ The purpose of a writer object is to write data to an image resource, and should be obtained by calling :func:`.get_writer`. A writer will (if the format permits) write data to the file as soon as new data is provided (i.e. streaming). A writer can also be used as a context manager so that it is automatically closed. Plugins implement Writers for different formats. Though rare, plugins may provide additional functionality (beyond what is provided by the base writer class). """ def append_data(self, im, meta=None): """ append_data(im, meta={}) Append an image (and meta data) to the file. The final meta data that is used consists of the meta data on the given image (if applicable), updated with the given meta data. """ self._checkClosed() # Check image data if not isinstance(im, np.ndarray): raise ValueError('append_data requires ndarray as first arg') # Get total meta dict total_meta = {} if hasattr(im, 'meta') and isinstance(im.meta, dict): total_meta.update(im.meta) if meta is None: pass elif not isinstance(meta, dict): raise ValueError('Meta must be a dict.') else: total_meta.update(meta) # Decouple meta info im = asarray(im) # Call return self._append_data(im, total_meta) def set_meta_data(self, meta): """ set_meta_data(meta) Sets the file's (global) meta data. The meta data is a dict whose shape depends on the format. E.g. for JPEG the dict maps group names to subdicts, and each group is a dict with name-value pairs. The groups represent the different metadata formats (EXIF, XMP, etc.). Note that some meta formats may not be supported for writing, and individual fields may be ignored without warning if they are invalid. """ self._checkClosed() if not isinstance(meta, dict): raise ValueError('Meta must be a dict.') else: return self._set_meta_data(meta) # To implement def _append_data(self, im, meta): # Plugins must implement this raise NotImplementedError() def _set_meta_data(self, meta): # Plugins must implement this raise NotImplementedError() class FormatManager: """ There is exactly one FormatManager object in imageio: ``imageio.formats``.
Its purpose is to keep track of the registered formats. The format manager supports getting a format object using indexing (by format name or extension). When used as an iterator, this object yields all registered format objects. See also :func:`.help`. """ def __init__(self): self._formats = [] def __repr__(self): return '<imageio.FormatManager with %i registered formats>' % len(self) def __iter__(self): return iter(self._formats) def __len__(self): return len(self._formats) def __str__(self): ss = [] for format in self._formats: ext = ', '.join(format.extensions) s = '%s - %s [%s]' % (format.name, format.description, ext) ss.append(s) return '\n'.join(ss) def __getitem__(self, name): # Check if not isinstance(name, string_types): raise ValueError('Looking up a format should be done by name ' 'or by extension.') # Test if name is existing file if os.path.isfile(name): from . import Request format = self.search_read_format(Request(name, 'r?')) if format is not None: return format if '.' in name: # Look for extension e1, e2 = os.path.splitext(name) name = e2 or e1 # Search for format that supports this extension name = name.lower()[1:] for format in self._formats: if name in format.extensions: return format else: # Look for name name = name.upper() for format in self._formats: if name == format.name: return format else: # Maybe the user meant to specify an extension return self['.'+name.lower()] # Nothing found ... raise IndexError('No format known by name %s.' % name) def add_format(self, format): """ add_format(format) Register a format, so that imageio can use it. """ if not isinstance(format, Format): raise ValueError('add_format needs argument to be a Format object') elif format in self._formats: raise ValueError('Given Format instance is already registered') else: self._formats.append(format) def search_read_format(self, request): """ search_read_format(request) Search a format that can read a file according to the given request. Returns None if no appropriate format was found. (used internally) """ select_mode = request.mode[1] if request.mode[1] in 'iIvV' else '' for format in self._formats: if select_mode in format.modes: if format.can_read(request): return format def search_write_format(self, request): """ search_write_format(request) Search a format that can write a file according to the given request. Returns None if no appropriate format was found. (used internally) """ select_mode = request.mode[1] if request.mode[1] in 'iIvV' else '' for format in self._formats: if select_mode in format.modes: if format.can_write(request): return format
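# ---------------------------------------------------------------------------
# Hedged illustration (not part of the original module): a minimal do-nothing
# plugin of the kind the docstrings above describe. The format name, the
# '.dummy' extension and the fixed 8x8 image are invented for the example,
# and request.filename is assumed to carry the resource name.
class _DummyFormat(Format):

    def _can_read(self, request):
        return request.filename.lower().endswith('.dummy')

    def _can_write(self, request):
        return request.filename.lower().endswith('.dummy')

    class Reader(Format.Reader):
        def _open(self):
            pass  # a real plugin would open the resource here

        def _close(self):
            pass

        def _get_length(self):
            return 1  # singleton image

        def _get_data(self, index):
            return np.zeros((8, 8), 'uint8'), {}

        def _get_meta_data(self, index):
            return {}

    class Writer(Format.Writer):
        def _open(self):
            pass

        def _close(self):
            pass

        def _append_data(self, im, meta):
            pass  # a real plugin would encode im here

        def _set_meta_data(self, meta):
            pass

# Registration would then follow the pattern from the module docstring:
#   formats.add_format(_DummyFormat('dummy', 'An example format',
#                                   '.dummy', 'i'))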
blink1073/imageio
imageio/core/format.py
Python
bsd-2-clause
21,404
0.006634
"""Serialization of geometries for use in pyIEM.plot mapping We use a pickled protocol=2, which is compat binary. """ from pandas import read_sql from pyiem.util import get_dbconnstr PATH = "../src/pyiem/data/ramps/" # Be annoying print("Be sure to run this against Mesonet database and not laptop!") def do(ramp): """states.""" df = read_sql( "SELECT l.coloridx, l.value, l.r, l.g, l.b from iemrasters_lookup l " "JOIN iemrasters r ON (l.iemraster_id = r.id) WHERE r.name = %s and " "value is not null " "ORDER by coloridx ASC", get_dbconnstr("mesosite"), params=(ramp,), index_col="coloridx", ) df.to_csv(f"{PATH}{ramp}.txt") def main(): """Go Main""" for table in ["composite_n0r", "composite_n0q"]: do(table) if __name__ == "__main__": main()
akrherz/pyIEM
util/make_ramps.py
Python
mit
848
0
"""Tests for the RunTaskCommand class""" from cumulusci.cli.runtime import CliRuntime from cumulusci.cli.cci import RunTaskCommand import click import pytest from unittest.mock import Mock, patch from cumulusci.cli import cci from cumulusci.core.exceptions import CumulusCIUsageError from cumulusci.cli.tests.utils import run_click_command, DummyTask color_opts = {"options": {"color": {}}} multiple_opts = {"options": {"foo": {}, "bar": {}, "baz": {}}} test_tasks = { "dummy-task": {"class_path": "cumulusci.cli.tests.utils.DummyTask"}, "dummy-derived-task": { "class_path": "cumulusci.cli.tests.test_run_task.DummyDerivedTask" }, } @pytest.fixture def runtime(): runtime = CliRuntime(load_keychain=False) runtime.project_config.config["tasks"] = {**test_tasks} runtime.keychain = Mock() runtime.keychain.get_default_org.return_value = (None, None) with patch("cumulusci.cli.cci.RUNTIME", runtime): yield runtime def test_task_run(runtime): DummyTask._run_task = Mock() multi_cmd = cci.RunTaskCommand() cmd = multi_cmd.get_command(Mock, "dummy-task") run_click_command(cmd, "dummy-task", color="blue", runtime=runtime) DummyTask._run_task.assert_called_once() def test_task_run__no_project(runtime): runtime.project_config = None runtime.project_config_error = Exception("Broken") with pytest.raises(Exception, match="Broken"): cci.RunTaskCommand().get_command(Mock, "dummy-task") def test_task_run__debug_before(runtime): DummyTask._run_task = Mock() multi_cmd = cci.RunTaskCommand() set_trace = Mock(side_effect=SetTrace) with patch("pdb.set_trace", set_trace): with pytest.raises(SetTrace): cmd = multi_cmd.get_command(Mock(), "dummy-task") run_click_command( cmd, "dummy_task", color="blue", debug_before=True, debug_after=False, runtime=runtime, ) def test_task_run__debug_after(runtime): DummyTask._run_task = Mock() multi_cmd = cci.RunTaskCommand() set_trace = Mock(side_effect=SetTrace) with patch("pdb.set_trace", set_trace): with pytest.raises(SetTrace): cmd = multi_cmd.get_command(Mock(), "dummy-task") run_click_command( cmd, "dummy-task", color="blue", debug_before=False, debug_after=True, runtime=runtime, ) def test_task_run__list_commands(runtime): multi_cmd = cci.RunTaskCommand() commands = multi_cmd.list_commands(Mock()) assert commands == ["dummy-derived-task", "dummy-task"] def test_format_help(runtime): with patch("cumulusci.cli.cci.click.echo") as echo: runtime.universal_config = Mock() RunTaskCommand().format_help(Mock(), Mock()) assert 4 == echo.call_count assert 0 == len(runtime.universal_config.method_calls) def test_get_default_command_options(): opts = RunTaskCommand()._get_default_command_options(is_salesforce_task=False) assert len(opts) == 4 opts = RunTaskCommand()._get_default_command_options(is_salesforce_task=True) assert len(opts) == 5 assert any([o.name == "org" for o in opts]) def test_collect_task_options(): new_options = {"debug-before": None} old_options = (("color", "green"),) opts = RunTaskCommand()._collect_task_options( new_options, old_options, "dummy-task", color_opts["options"] ) assert opts == {"color": "green"} def test_collect_task_options__duplicate(): new_options = {"color": "aqua"} old_options = (("color", "green"),) with pytest.raises(CumulusCIUsageError): RunTaskCommand()._collect_task_options( new_options, old_options, "dummy-task", color_opts["options"] ) def test_collect_task_options__not_in_task(): new_options = {} old_options = (("color", "green"),) with pytest.raises(CumulusCIUsageError): RunTaskCommand()._collect_task_options( new_options, old_options, 
"dummy-task", {"not-color": {}} ) class SetTrace(Exception): pass class DummyDerivedTask(DummyTask): def _run_task(self): click.echo(f"<{self.__class__}>\n\tcolor: {self.options['color']}")
SalesforceFoundation/CumulusCI
cumulusci/cli/tests/test_run_task.py
Python
bsd-3-clause
4,367
0.000458
import base64 import httplib import json import os import re import ssl import urllib from urlparse import urlunsplit from exceptions import CloudPassageAuthentication class HaloSession(object): """All Halo API session management happens in this object. Args: key(str): Halo API key secret(str): Halo API secret Kwargs: api_host(str): Hostname for Halo API. Defaults to ``api.cloudpassage.com`` cert_file(str): Full path to CA file. integration_string(str): This identifies a specific integration to the Halo API. """ def __init__(self, halo_key, halo_secret, **kwargs): self.key = halo_key self.secret = halo_secret self.api_host = "api.cloudpassage.com" self.sdk_version = self.get_sdk_version() self.sdk_version_string = "Halo-Python-SDK-slim/%s" % self.sdk_version self.integration_string = '' self.cert_file = None if "api_host" in kwargs: self.api_host = kwargs["api_host"] if "cert_file" in kwargs: self.cert_file = kwargs["cert_file"] if "integration_string" in kwargs: self.integration_string = kwargs["integration_string"] self.user_agent = self.build_ua_string(self.sdk_version_string, self.integration_string) self.threads = 10 self.api_token = None def authenticate(self): """Obtain and set an oauth API token.""" headers = self.build_auth_headers(self.key, self.secret) headers["User-Agent"] = self.user_agent params = urllib.urlencode({'grant_type': 'client_credentials'}) if self.cert_file is None: connection = httplib.HTTPSConnection(self.api_host) else: ctx = ssl.SSLContext(ssl.PROTOCOL_TLSv1_2) ctx.load_verify_locations(self.cert_file) connection = httplib.HTTPSConnection(self.api_host, context=ctx) connection.request("POST", '/oauth/access_token', params, headers) response = connection.getresponse() code = response.status body = response.read().decode() if code == 401: # Bad API key... raise CloudPassageAuthentication(json.dumps(body)) self.api_token = json.loads(body)['access_token'] return True @classmethod def build_auth_headers(cls, key, secret): """Create an auth string for Halo oauth.""" credfmt = "{key}:{secret}".format(key=key, secret=secret) creds = base64.b64encode(credfmt) auth_string = "Basic {creds}".format(creds=creds) auth_header = {"Authorization": auth_string} return auth_header def build_header(self): """This builds the header, required for all API interaction.""" if self.api_token is None: self.authenticate() authstring = "Bearer " + self.api_token header = {"Authorization": authstring, "Content-Type": "application/json", "User-Agent": self.user_agent} return header @classmethod def build_ua_string(cls, sdk_version_str, integration_string): ua = "{sdk} {integration}".format(sdk=sdk_version_str, integration=integration_string) return ua def build_url(self, endpoint): """Build a URL from parts.""" url = urlunsplit(("https", self.api_host, endpoint, "", "")) return url @classmethod def get_sdk_version(cls): here_dir = os.path.abspath(os.path.dirname(__file__)) init_file_path = os.path.join(here_dir, "__init__.py") raw_init_file = open(init_file_path).read() rx_compiled = re.compile(r"\s*__version__\s*=\s*\"(\S+)\"") version = rx_compiled.search(raw_init_file).group(1) return version
ashmastaflash/cloudpassage_slim
cloudpassage_slim/halo_session.py
Python
bsd-2-clause
3,951
0
# This module is for compatibility only.  All functions are defined elsewhere.

__all__ = ['rand', 'tril', 'trapz', 'hanning', 'rot90', 'triu', 'diff', 'angle',
           'roots', 'ptp', 'kaiser', 'randn', 'cumprod', 'diag', 'msort',
           'LinearAlgebra', 'RandomArray', 'prod', 'std', 'hamming', 'flipud',
           'max', 'blackman', 'corrcoef', 'bartlett', 'eye', 'squeeze', 'sinc',
           'tri', 'cov', 'svd', 'min', 'median', 'fliplr', 'eig', 'mean']

import numpy.oldnumeric.linear_algebra as LinearAlgebra
import numpy.oldnumeric.random_array as RandomArray
from numpy import tril, trapz as _Ntrapz, hanning, rot90, triu, diff, \
     angle, roots, ptp as _Nptp, kaiser, cumprod as _Ncumprod, \
     diag, msort, prod as _Nprod, std as _Nstd, hamming, flipud, \
     amax as _Nmax, amin as _Nmin, blackman, bartlett, \
     squeeze, sinc, median, fliplr, mean as _Nmean, transpose

from numpy.linalg import eig, svd
from numpy.random import rand, randn
import numpy as np

from typeconv import convtypecode

def eye(N, M=None, k=0, typecode=None, dtype=None):
    """ eye returns a N-by-M 2-d array where the k-th diagonal is all ones,
        and everything else is zeros.
    """
    dtype = convtypecode(typecode, dtype)
    if M is None: M = N
    m = np.equal(np.subtract.outer(np.arange(N), np.arange(M)), -k)
    if m.dtype != dtype:
        return m.astype(dtype)
    return m  # dtype already matches; return the array unchanged

def tri(N, M=None, k=0, typecode=None, dtype=None):
    """ returns a N-by-M array where all the diagonals starting from
        lower left corner up to the k-th are all ones.
    """
    dtype = convtypecode(typecode, dtype)
    if M is None: M = N
    m = np.greater_equal(np.subtract.outer(np.arange(N), np.arange(M)), -k)
    if m.dtype != dtype:
        return m.astype(dtype)
    return m  # dtype already matches; return the array unchanged

def trapz(y, x=None, axis=-1):
    return _Ntrapz(y, x, axis=axis)

def ptp(x, axis=0):
    return _Nptp(x, axis)

def cumprod(x, axis=0):
    return _Ncumprod(x, axis)

def max(x, axis=0):
    return _Nmax(x, axis)

def min(x, axis=0):
    return _Nmin(x, axis)

def prod(x, axis=0):
    return _Nprod(x, axis)

def std(x, axis=0):
    N = asarray(x).shape[axis]
    return _Nstd(x, axis)*sqrt(N/(N-1.))

def mean(x, axis=0):
    return _Nmean(x, axis)

# This is exactly the same cov function as in MLab
def cov(m, y=None, rowvar=0, bias=0):
    if y is None:
        y = m
    if rowvar:
        m = transpose(m)
        y = transpose(y)
    if (m.shape[0] == 1):
        m = transpose(m)
    if (y.shape[0] == 1):
        y = transpose(y)
    N = m.shape[0]
    if (y.shape[0] != N):
        raise ValueError("x and y must have the same number of observations")
    m = m - _Nmean(m, axis=0)
    y = y - _Nmean(y, axis=0)
    if bias:
        fact = N*1.0
    else:
        fact = N-1.0
    return squeeze(dot(transpose(m), conjugate(y)) / fact)

from numpy import sqrt, multiply
def corrcoef(x, y=None):
    c = cov(x, y)
    d = diag(c)
    return c/sqrt(multiply.outer(d, d))

from compat import *
from functions import *
from precision import *
from ufuncs import *
from misc import *

import compat
import precision
import functions
import misc
import ufuncs

import numpy
__version__ = numpy.__version__
del numpy

__all__ += ['__version__']
__all__ += compat.__all__
__all__ += precision.__all__
__all__ += functions.__all__
__all__ += ufuncs.__all__
__all__ += misc.__all__

del compat
del functions
del precision
del ufuncs
del misc
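
# A short usage sketch for the compatibility eye()/tri() wrappers above,
# kept as a doctest-style comment because numpy.oldnumeric was removed from
# later NumPy releases; exact dtype handling depends on convtypecode.
#
#   >>> eye(3, k=1, dtype='d')          # ones on the first superdiagonal
#   array([[ 0.,  1.,  0.],
#          [ 0.,  0.,  1.],
#          [ 0.,  0.,  0.]])
#   >>> tri(3, dtype='l')               # lower triangle, diagonal included
#   array([[1, 0, 0],
#          [1, 1, 0],
#          [1, 1, 1]])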
beiko-lab/gengis
bin/Lib/site-packages/numpy/oldnumeric/mlab.py
Python
gpl-3.0
3,566
0.009534
#TSTOP # #This program is free software: you can redistribute it and/or modify #it under the terms of the GNU General Public License as published by #the Free Software Foundation, either version 3 of the License, or #(at your option) any later version. # #This program is distributed in the hope that it will be useful, #but WITHOUT ANY WARRANTY; without even the implied warranty of #MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #GNU General Public License for more details. # #You should have received a copy of the GNU General Public License #along with this program. If not, see <http://www.gnu.org/licenses/>. import time import sys import json import csv import itertools import argparse from Datatypes.Configuration import get_filename from Datatypes.Segments import Segments, Segment import os class PAMAPSegments(Segments): """ Segments generated from the Physical Activity Monitoring Data Set from https://archive.ics.uci.edu/ml/datasets/PAMAP2+Physical+Activity+Monitoring Labels are derived from the second value in each line of the file, followed by a heart rate and readings from three inertial measurement units (that sporadically have dropped values). The values in config.data_index correspond to the index of the data values used for segments and windows. In the event of a dropped value, indicated by NaN in the file, we carry over the previous data value. """ def __init__(self, config) : config.label_index = 1 super(self.__class__, self).__init__(config) if isinstance(self.config.data_file, list) : self.config.data_file = self.config.data_file[0] if not isinstance(self.config.data_index, list) : self.config.data_index = [self.config.data_index] with open(self.config.data_file, 'r') as data_file : data_reader = csv.reader(data_file, delimiter=' ') full_data = [line for line in data_reader] # carry over previous values for any NaNs prev_line = full_data[0] line_range = range(len(prev_line)) for line in full_data[1:] : for (l0, l1, i) in zip(prev_line, line, line_range) : if l1 == "NaN" : line[i] = l0 prev_line = line label_set = set([d[self.config.label_index] for d in full_data]) if self.config.window_size == -1 : self.config.window_size = self.config.segment_size self.segments = [] for segment_start in range(0, len(full_data) - self.config.segment_size + 1, self.config.segment_stride) : segment_end = segment_start + self.config.segment_size windows = [] # if the data_index has more than one entry, interleave the results. # e.g. if data_index is [1,2] it's [(x_0, label), (y_0, label), (x_1, label), (y_1, label)...] 
for window_start in range(segment_start, segment_end - self.config.window_size + 1, self.config.window_stride): window_end = window_start + self.config.window_size windows.append(list(itertools.chain(*itertools.izip(*[[float(d[i]) for d in full_data[window_start:window_end]] \ for i in self.config.data_index])))) labels = [d[self.config.label_index] for d in full_data[segment_start:segment_end]] label_dict = dict([(str(l), len([d for d in labels if d == l])) for l in list(set(labels))]) segment = Segment(windows=windows, segment_start=segment_start, segment_size=self.config.segment_size, window_stride=self.config.window_stride, window_size=self.config.window_size, labels=label_dict, filename=self.config.data_file, data_index = self.config.data_index, label_index = self.config.label_index) self.segments.append(segment) @staticmethod def get_segment_filename(config, gz=True): fields = ['data_file', 'data_index', 'segment_size', 'segment_stride', 'window_size', 'window_stride'] return get_filename(config, fields, 'PAMAPSegments', gz)
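
# A hedged construction sketch: the real Configuration object comes from
# Datatypes.Configuration and its constructor is not shown here, so the
# attribute names below only illustrate which fields PAMAPSegments reads
# (the file path and sizes are placeholders), not a guaranteed API.
#
#   config.data_file = ['PAMAP2_Dataset/Protocol/subject101.dat']
#   config.data_index = [2]      # e.g. the heart-rate column
#   config.segment_size = 1000
#   config.segment_stride = 500
#   config.window_size = 100
#   config.window_stride = 50
#   segments = PAMAPSegments(config)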
gpersistence/tstop
python/persistence/PAMAPSegments.py
Python
gpl-3.0
4,400
0.009091
from django.conf.urls.defaults import * from django.contrib import admin from django.conf import settings admin.autodiscover() urlpatterns = patterns('', (r'^kipa/', include('tupa.urls')), (r'^admin/', include(admin.site.urls)), ) if settings.DEBUG : urlpatterns += patterns('', (r'^kipamedia/(?P<path>.*)$', 'django.views.static.serve', {'document_root': settings.STATIC_DOC_ROOT}),) handler500 = 'tupa.views.raportti_500'
siimeon/Kipa
web/urls.py
Python
gpl-3.0
485
0.010309
# coding: utf-8 # Copyright (c) Pymatgen Development Team. # Distributed under the terms of the MIT License. from __future__ import unicode_literals """ This package provides the modules to perform FEFF IO. FEFF: http://feffproject.org/feffproject-feff.html """ from .inputs import * from .outputs import *
johnson1228/pymatgen
pymatgen/io/feff/__init__.py
Python
mit
311
0
from django.contrib import admin from django.core.urlresolvers import NoReverseMatch from . import models class WPSiteAdmin(admin.ModelAdmin): list_display = ('name', 'url', 'hook') readonly_fields = ('name', 'description') def save_model(self, request, obj, form, change): # TODO do this sync async (give celery another shot?) obj.save() obj.fetch_all() # CUSTOM METHODS # def hook(self, obj): """ This is where an admin can find what url to point the webhook to. Doing it as an absolute url lets us cheat and make the browser figure out the host for us. Requires HookPress: http://wordpress.org/plugins/hookpress/ """ try: return (u'<a href="{}" title="Add a save_post hook with the ID">' 'Webhook</a>'.format(obj.hook_url)) except NoReverseMatch: return '' hook.allow_tags = True admin.site.register(models.WPSite, WPSiteAdmin) class WPUserAdmin(admin.ModelAdmin): readonly_fields = ('synced_at', ) admin.site.register(models.WPUser, WPUserAdmin) class WPCategoryAdmin(admin.ModelAdmin): readonly_fields = ('synced_at', ) admin.site.register(models.WPCategory, WPCategoryAdmin) class WPTagAdmin(admin.ModelAdmin): readonly_fields = ('synced_at', ) admin.site.register(models.WPTag, WPTagAdmin) class WPPostAdmin(admin.ModelAdmin): list_display = ('title', 'date', 'type', 'status', ) list_filter = ('type', 'status', ) readonly_fields = ('synced_at', ) admin.site.register(models.WPPost, WPPostAdmin) class WPLogAdmin(admin.ModelAdmin): list_display = ('timestamp', 'wp', 'action', ) list_filter = ('wp', 'action', ) readonly_fields = ('wp', 'timestamp', 'action', 'body', ) admin.site.register(models.WPLog, WPLogAdmin)
texastribune/wjordpress
wjordpress/admin.py
Python
apache-2.0
1,830
0.003825
#-*- coding: utf-8 -*-
'''
Created on 24 Dec 2010

@author: ivan
'''
import random

all_agents = """
Mozilla/5.0 (X11; U; Linux x86_64; en-US; rv:1.9.1.3) Gecko/20090913 Firefox/3.5.3
Mozilla/5.0 (Windows; U; Windows NT 6.1; en; rv:1.9.1.3) Gecko/20090824 Firefox/3.5.3 (.NET CLR 3.5.30729)
Mozilla/5.0 (Windows; U; Windows NT 5.2; en-US; rv:1.9.1.3) Gecko/20090824 Firefox/3.5.3 (.NET CLR 3.5.30729)
Mozilla/5.0 (Windows; U; Windows NT 6.1; en-US; rv:1.9.1.1) Gecko/20090718 Firefox/3.5.1
Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US) AppleWebKit/532.1 (KHTML, like Gecko) Chrome/4.0.219.6 Safari/532.1
Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 6.1; WOW64; Trident/4.0; SLCC2; .NET CLR 2.0.50727; InfoPath.2)
Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 6.0; Trident/4.0; SLCC1; .NET CLR 2.0.50727; .NET CLR 1.1.4322; .NET CLR 3.5.30729; .NET CLR 3.0.30729)
Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 5.2; Win64; x64; Trident/4.0)
Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 5.1; Trident/4.0; SV1; .NET CLR 2.0.50727; InfoPath.2)Mozilla/5.0 (Windows; U; MSIE 7.0; Windows NT 6.0; en-US)
Mozilla/4.0 (compatible; MSIE 6.1; Windows XP)
"""


def get_ranmom_agent():
    # Chain the replacements on a working copy so every digit 0-9 gets
    # randomized, then pick one of the ten agent lines at random.
    # randint's bounds are inclusive, so cap at 9 to keep each substitution
    # a single digit.
    agents = all_agents
    for i in xrange(10):
        agents = agents.replace(str(i), str(random.randint(0, 9)))
    return agents.splitlines()[random.randint(1, 10)]
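
# A brief usage sketch: feed the randomized agent string into a urllib2
# request header (the URL is a placeholder). Kept as a comment so importing
# this module stays side-effect free.
#
#   import urllib2
#   req = urllib2.Request("http://example.com",
#                         headers={"User-Agent": get_ranmom_agent()})
#   html = urllib2.urlopen(req).read()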
sitexa/foobnix
foobnix/util/agent.py
Python
gpl-3.0
1,349
0.007429
#!/usr/bin/env python
# Start a network

import time
import os
import re
import sys
import commands

import libvirt
from libvirt import libvirtError

from src import sharedmod

required_params = ('networkname',)
optional_params = {}


def start(params):
    """activate a defined network"""
    global logger
    logger = params['logger']
    params.pop('logger')
    networkname = params['networkname']
    logger.info("the name of virtual network to be activated is %s" %
                networkname)

    conn = sharedmod.libvirtobj['conn']

    net_defined_list = conn.listDefinedNetworks()

    if networkname not in net_defined_list:
        logger.error("virtual network %s doesn't exist "
                     "or is active already." % networkname)
        return 1
    else:
        netobj = conn.networkLookupByName(networkname)
        netxmldesc = netobj.XMLDesc(0)
        logger.debug("the xml description of the virtual network is %s" %
                     netxmldesc)

    try:
        logger.info("begin to activate virtual network %s" % networkname)
        netobj.create()
    except libvirtError, e:
        logger.error("API error message: %s, error code is %s"
                     % (e.message, e.get_error_code()))
        logger.error("fail to start network %s" % networkname)
        return 1

    net_activated_list = conn.listNetworks()

    if networkname not in net_activated_list:
        logger.error("virtual network %s failed to be activated." % networkname)
        return 1
    else:
        shell_cmd = "virsh net-list --all"
        (status, text) = commands.getstatusoutput(shell_cmd)
        logger.debug("the output of 'virsh net-list --all' is %s" % text)

    logger.info("activate the virtual network successfully.")
    time.sleep(3)

    return 0
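
# A hedged invocation sketch: in the real test framework the params dict is
# assembled by the runner, and sharedmod.libvirtobj['conn'] must already hold
# an open libvirt connection, so this is illustrative only.
#
#   import logging
#   logging.basicConfig()
#   ret = start({'networkname': 'default', 'logger': logging.getLogger('start')})
#   assert ret == 0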
ryanmiao/libvirt-test-API
repos/network/start.py
Python
gpl-2.0
1,786
0.003919
#!/usr/bin/python import os, sys from termcolor import colored try: import requests except: print "[!]Requests module not found. Try to (re)install it.\n[!]pip install requests" oks = [] print("EASY DIRBUSTER!") def parser(): #URL flag = False while ( flag == False ): url = raw_input("Insert an URL (with HTTP:// or HTTPS://)\n\tURL: ") if (url.startswith("http://")): flag = True elif (url.startswith("https://")): flag = True else: pass #PATH flag = False while ( flag == False ): path = raw_input("Insert path to File (ex: /root/wordlists/list.txt)\n\tPATH: ") if (os.path.isfile(path)): flag = True else: pass return url, path def requester(url, fpath): if (requests.get(url).status_code != 200): return 0 else: with open(fpath) as f: for path in f: temp1 = url + "/" + str(path).replace("\n","") temp2 = url + "/" + str(path).replace("\n","") + "/" for temp in temp1,temp2: r = requests.get(temp) if (r.status_code == 200): print colored(("[!] " + str(r.status_code) + " OK!! -> " + temp), 'green') oks.append(str(temp)) elif (r.status_code == 403): print colored(("[!] " + str(r.status_code) + " Forb -> " + temp), 'yellow') else: print colored(("[!] " + str(r.status_code) + " NotF -> " + temp), 'red') return 1 url, path = parser() #print url + path if (requester(url, path) == 0): print "Error. URL not available." else: print "200 OK Requested sites: " for ok in oks: print "\t" + ok print "Finished Successfully!"
remiotore/Python-Tools
DirBuster.py
Python
mit
1,875
0.011733
''' Created on 10/mar/2014 @author: sectumsempra ''' import sys ''' from variab_conc import errbox from PyQt4 import QtGui, QtCore, Qt from PyQt4.QtCore import pyqtSignal, SLOT from PyQt4.QtCore import pyqtSlot, SIGNAL from PyQt4.QtGui import QPushButton, QTextEdit, QTableWidgetItem ''' from PyQt4 import QtGui, QtCore import signal import time var_str = 'string' nom_str = 'ss' numvarinp = 'ala' numconcinp = 'asda' var_lis = [] nom_lis = [] pes_lis = [] punt_fin_dis = [] punt_fin_ord = [] punt_ordinati = [] class_fin = [] tab_pesi = None class confgui(QtGui.QMainWindow): def __init__(self): QtGui.QMainWindow.__init__(self) global numvarinp, numconcinp bwidget = QtGui.QWidget(self) master_columner = QtGui.QVBoxLayout() grid = QtGui.QGridLayout() #toplabel = QtGui.QLabel('Setup') button1 =QtGui.QPushButton('Start!', bwidget) vbox00 = QtGui.QVBoxLayout() numconc = QtGui.QLabel('Concorrenti', bwidget) vbox00.addWidget(numconc) self.setWindowTitle("BESTAPP Config") numconcinp = QtGui.QTextEdit() vbox00.addWidget(numconcinp) vbox01 = QtGui.QVBoxLayout() numvar = QtGui.QLabel('Variabili', bwidget) numvarinp = QtGui.QTextEdit() vbox01.addWidget(numvar) vbox01.addWidget(numvarinp) grid.addLayout(vbox00, 0, 0) grid.addLayout(vbox01, 0, 1) #master_columner.addWidget(toplabel) master_columner.addLayout(grid) master_columner.addWidget(button1) bwidget.setLayout(master_columner) self.setCentralWidget(bwidget) button1.clicked.connect(self.settings) numvarinp.textChanged.connect(self.var_to_str) numconcinp.textChanged.connect(self.nom_to_str) self.resize(600, 400) self.setStyleSheet("font: 16pt \"DejaVu Serif\";\n ") def switchwind(self): global varconfig self.hide() varconfig = valorideipesi() print("initdone") varconfig.showMaximized() print("alldonw") def settings(self): self.varstr_to_list() self.nomstr_to_list() self.switchwind() def varstr_to_list(self): global var_lis global var_str var_lis = [] f = open('lista_var.txt', 'w+') a = str(var_str) f.write(a) f.seek(0, 0) for line in f: linea = line.rstrip('\n') var_lis.append(linea) f.close() def nomstr_to_list(self): global nom_lis global nom_str nom_lis = [] f = open ('lista_nomi.txt', 'w+') a = str(nom_str) f.write(a) f.seek(0, 0) for line in f: linea = line.rstrip('\n') nom_lis.append(linea) print(nom_lis) f.close() def nom_to_str(self): global nom_str nom_str = numconcinp.toPlainText() def var_to_str(self): global var_str var_str = numvarinp.toPlainText() class maingui(QtGui.QMainWindow): #global nom_lis #global var_lis b = [] col = [] def __init__(self): global nom_lis, var_lis QtGui.QMainWindow.__init__(self) bWidget = QtGui.QWidget(self) self.len_nom_lis = len(nom_lis) self.len_var_lis = len(var_lis) self.tabellone = QtGui.QTableWidget() self.tabellone.setColumnCount(self.len_var_lis) self.tabellone.setRowCount(self.len_nom_lis) self.button_save = QtGui.QPushButton("save", bWidget) mainlay = QtGui.QVBoxLayout() mainlay.addWidget(self.tabellone) mainlay.addWidget(self.button_save) bWidget.setLayout(mainlay) self.setCentralWidget(bWidget) self.setWindowTitle("BESTAPP Config") self.grid_mk(nom_lis, var_lis) def grid_mk(self, rw_names, col_names): rw_num = len(rw_names) col_num = len(col_names) """ for a in range(col_num): for b in range(rw_num): aleph = QtGui.QTableWidgetItem(0) aleph.setText(str("0")) self.tabellone.setItem(b, a, aleph) """ self.tabellone.setHorizontalHeaderLabels(col_names) self.tabellone.setVerticalHeaderLabels(rw_names) self.tabellone.horizontalHeader().setResizeMode(QtGui.QHeaderView.Stretch) 
        self.tabellone.verticalHeader().setResizeMode(QtGui.QHeaderView.Stretch)
        #rw_hei = int(700 / rw_num)
        #col_wid = int(1024 / col_num)
        for i in range(0, col_num):
            self.tabellone.setColumnWidth(i, 150)
        for j in range(0, rw_num):
            self.tabellone.setRowHeight(j, 50)
        self.button_save.clicked.connect(self.readScores)
        #print(rw_hei, col_wid)
        #print("finished grid")
        self.setStyleSheet("font: 16pt \"DejaVu Serif\";\n ")
        #return None

    def readScores(self):
        global nom_lis
        global var_lis
        righe = len(nom_lis)
        colonne = len(var_lis)
        n = 0
        f = open('lista_punteggi.txt', 'w+')
        for rig in range(righe):
            punt = []
            for col in range(colonne):
                pnt = str(self.tabellone.item(rig, col).text())
                punt.append(pnt)
            risultati = "|".join(punt)
            f.write(risultati + "\n")
        f.close()
        self.close()


class valorideipesi(QtGui.QMainWindow):
    def __init__(self):
        QtGui.QMainWindow.__init__(self)
        global var_lis, tab_pesi
        num_r = len(var_lis)
        sWidget = QtGui.QWidget()
        tab_pesi = QtGui.QTableWidget()
        tab_pesi.setColumnCount(1)
        tab_pesi.setRowCount(num_r)
        tab_pesi.setVerticalHeaderLabels(var_lis)
        #tab_pesi.setColumnWidth(0, 300)
        #for i in range(0, num_r):
        #    tab_pesi.setRowHeight(i, 80)
        ok = QtGui.QPushButton("OK", sWidget)
        vlay = QtGui.QVBoxLayout()
        vlay.addWidget(tab_pesi)
        vlay.addWidget(ok)
        sWidget.setLayout(vlay)
        self.setCentralWidget(sWidget)
        self.resize(400, 400)
        ok.clicked.connect(self.switchwind1)
        self.setStyleSheet("font: 16pt \"DejaVu Serif\";\n ")
        self.setWindowTitle("BESTAPP Config")
        tab_pesi.horizontalHeader().setResizeMode(QtGui.QHeaderView.Stretch)
        tab_pesi.verticalHeader().setResizeMode(QtGui.QHeaderView.Stretch)

    def switchwind1(self):
        global mainwind
        self.saveconstants()
        self.hide()
        mainwind = maingui()
        mainwind.showMaximized()
        print("connected")

    def saveconstants(self):
        global var_lis, pes_lis, tab_pesi
        top = len(var_lis)
        for i in range(0, top):
            pes_lis.append(str(tab_pesi.item(i, 0).text()) + "\n")
        f = open('lista_pes.txt', 'w+')
        f.writelines(pes_lis)
        f.close()
        print(pes_lis)


def classifica():
    global pes_lis, punt_ordinati, punt_fin_dis, punt_fin_ord, class_fin, nom_lis
    a = len(pes_lis)
    divisore = 0
    for z in pes_lis:
        divisore += int(z)
    top = len(punt_ordinati)
    for lis_n in range(0, top):
        lis = punt_ordinati[lis_n]
        parziali = []
        for i in range(1, a):
            s1 = lis[i]
            s1i = int(s1)
            i2 = i - 1
            s2 = pes_lis[i2]
            s2i = int(s2)
            par = s1i * s2i
            parziali.append(par)
        dividendo = 0
        for x in parziali:
            dividendo += int(x)
        # cast before dividing: int/int would truncate under Python 2
        punteggio = float(dividendo) / divisore
        punt_fin_dis.append(punteggio)
    # sort a copy; aliasing punt_fin_dis would sort it too and break the
    # index lookup below
    punt_fin_ord = list(punt_fin_dis)
    punt_fin_ord.sort()
    tot = len(punt_fin_dis)  # avoid shadowing the max builtin
    for v1 in punt_fin_ord:
        for n in range(0, tot):
            if v1 == punt_fin_dis[n]:
                elem = nom_lis[n]
                class_fin.append(elem)
            else:
                pass
    print(class_fin)
    print(punt_fin_ord)


class myItem(QtGui.QTableWidgetItem):
    def __init__(self):
        QtGui.QTableWidgetItem.__init__(self)
        self.setText("0")


if __name__ == "__main__":
    app = QtGui.QApplication(sys.argv)
    config = confgui()
    config.showMaximized()
    app.exec_()
Roma2Lug-Projects/BEST_App
src/config.py
Python
apache-2.0
8,273
0.003747
from __future__ import with_statement import os from alembic import context from sqlalchemy import engine_from_config, pool from logging.config import fileConfig # this is the Alembic Config object, which provides # access to the values within the .ini file in use. config = context.config # Interpret the config file for Python logging. # This line sets up loggers basically. fileConfig(config.config_file_name) # add your model's MetaData object here # for 'autogenerate' support from moxie.models import Base target_metadata = Base.metadata # other values from the config, defined by the needs of env.py, # can be acquired: # my_important_option = config.get_main_option("my_important_option") # ... etc. def run_migrations_offline(): """Run migrations in 'offline' mode. This configures the context with just a URL and not an Engine, though an Engine is acceptable here as well. By skipping the Engine creation we don't even need a DBAPI to be available. Calls to context.execute() here emit the given string to the script output. """ url = config.get_main_option("sqlalchemy.url") context.configure(url=url, target_metadata=target_metadata) with context.begin_transaction(): context.run_migrations() def run_migrations_online(): """Run migrations in 'online' mode. In this scenario we need to create an Engine and associate a connection with the context. """ dbcfg = config.get_section(config.config_ini_section) if 'DATABASE_URL' in os.environ: dbcfg['sqlalchemy.url'] = os.environ['DATABASE_URL'] engine = engine_from_config( dbcfg, prefix='sqlalchemy.', poolclass=pool.NullPool) connection = engine.connect() context.configure( connection=connection, target_metadata=target_metadata ) try: with context.begin_transaction(): context.run_migrations() finally: connection.close() if context.is_offline_mode(): run_migrations_offline() else: run_migrations_online()
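
# Usage sketch (connection string is a placeholder): because
# run_migrations_online() prefers DATABASE_URL over the sqlalchemy.url from
# alembic.ini, the database can be selected from the environment:
#
#   DATABASE_URL=postgresql://moxie:secret@localhost/moxie alembic upgrade head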
paultag/moxie
migrations/env.py
Python
mit
2,125
0.002353
#@ { #@ "targets": #@ [{ #@ "name":"versioninfo.txt" #@ ,"status_check":"dynamic" #@ ,"dependencies":[{"ref":"maike","rel":"tool"}] #@ }] #@ } import sys import subprocess import shutil import os def modified_time(filename): try: return (os.path.getmtime(filename),True) except (KeyboardInterrupt, SystemExit): raise except: return (0,False) def newer(file_a,file_b): mod_a=modified_time(file_a) mod_b=modified_time(file_b) if mod_a[1]==False and mod_b[1]==False: raise OSError('Error: None of the files %s, and %s are accessible.'%(file_a,file_b)) if not mod_a[1]: return False if not mod_b[1]: return True return mod_a[0] > mod_b[0] def newer_than_all(file_a, files): for file in files: if newer(file,file_a): return False return True def git_changes(): with subprocess.Popen(('git', 'status','--porcelain'),stdout=subprocess.PIPE) \ as git: result=[]; for k in filter(None,git.stdout.read().decode().split('\n')): result.append( k[3:].split(' ')[0] ) return result def get_revision(): if shutil.which('git')==None: with open('versioninfo-in.txt') as versionfile: return versionfile.read().strip() else: with subprocess.Popen(('git', 'describe','--tags','--dirty','--always') \ ,stdout=subprocess.PIPE) as git: result=git.stdout.read().decode().strip() git.wait() status=git.returncode if status: with open('versioninfo-in.txt') as versionfile: return versionfile.read().strip() else: return result def write_error(*args, **kwargs): print(*args,file=sys.stderr,**kwargs) def get_rev_old(): with os.fdopen(os.open(target_dir + '/versioninfo.txt',os.O_RDONLY|os.O_CREAT),'r+') \ as verfile: return verfile.read().strip() try: target_dir=sys.argv[1] in_dir=sys.argv[2] revision=get_revision() rev_old=get_rev_old() if rev_old!=revision: with open(target_dir + '/versioninfo.txt','w') as verfile: verfile.write(revision) with open('versioninfo-in.txt','w') as verfile: verfile.write(revision) sys.exit(0) except Exception: write_error('%s:%d: error: %s\n'%(sys.argv[0],sys.exc_info()[2].tb_lineno,sys.exc_info()[1])) sys.exit(-1)
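
# A small usage sketch for the helpers above (file names are placeholders):
# newer() is True when its first argument was modified more recently, and it
# raises OSError only when neither file is accessible.
#
#   if newer('versioninfo-in.txt', 'versioninfo.txt'):
#       pass  # the recorded version is stale; regenerate it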
milasudril/anja
versioninfo.py
Python
gpl-3.0
2,158
0.056534
from django.conf.urls import patterns, url from manoseimas.lobbyists import views urlpatterns = patterns( '', url(r'^lobbyists/?$', views.lobbyists_json, name='lobbyists_json'), url(r'^law_projects/(?P<lobbyist_slug>[^/]+)/?$', views.law_projects_json, name='law_projects_json'), )
ManoSeimas/manoseimas.lt
manoseimas/lobbyists/json_urls.py
Python
agpl-3.0
304
0
# Beedraw/Hive network capable client and server allowing collaboration on a single image # Copyright (C) 2009 Thomas Becker # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA import PyQt4.QtCore as qtcore import PyQt4.QtNetwork as qtnet import socket try: from PyQt4.QtXml import QXmlStreamReader except: from PyQt4.QtCore import QXmlStreamReader import SocketServer from animation import XmlToQueueEventsConverter from sketchlog import SketchLogWriter from beetypes import * from beeutil import * class PyServerEventHandler(SocketServer.BaseRequestHandler,qtcore.QObject): def __init__(self,request,client_address,server,master,parentthread,id): qtcore.QObject.__init__(self) self.master=master self.parentthread=parentthread self.clientid=id self.server=server SocketServer.BaseRequestHandler.__init__(self,request,client_address,server) def handle(self): newsock=BeeSocket(BeeSocketTypes.python,self.request,True) # start the listener, that will authenticate client and finish setup newlistener=HiveClientListener(self,newsock,self.master,self.clientid) newlistener.run() class customPyServer(SocketServer.ThreadingMixIn,SocketServer.TCPServer,qtcore.QObject): def __init__(self,hostport,master,parentthread): qtcore.QObject.__init__(self) SocketServer.TCPServer.__init__(self,hostport,PyServerEventHandler) self.master=master self.parentthread=parentthread self.idlock=qtcore.QReadWriteLock() self.nextid=0 def getNextId(self): lock=qtcore.QWriteLocker(self.idlock) self.nextid+=1 return self.nextid def finish_request(self,request,client_address): PyServerEventHandler(request,client_address,self,self.master,self.parentthread,self.getNextId()) # dont' close the request after we're done in here def close_request(self,request): pass class BeeTCPServer(qtcore.QObject): """ Socket interface to allow changing between different tcp server implementations to see if Qt sockets or standard python sockets are better on each platform.""" def __init__(self,type,port,parentthread,master): if type==BeeSocketTypes.qt: qtcore.QObject.__init__(self,parentthread) self.type=type self.parentthread=parentthread self.master=master self.port=port self.idlock=qtcore.QReadWriteLock() self.nextid=0 def getNextId(self): lock=qtcore.QWriteLocker(self.idlock) self.nextid+=1 return self.nextid def start(self): if self.type==BeeSocketTypes.qt: self.server=qtnet.QTcpServer(self.parentthread) qtcore.QObject.connect(self.server, qtcore.SIGNAL("newConnection()"), self.newConnectionQt) if self.server.listen(qtnet.QHostAddress("0.0.0.0"),self.port): event=HiveServerStatusEvent(HiveServerStatusTypes.running) else: event=HiveServerStatusEvent(HiveServerStatusTypes.starterror,"%s" % self.server.errorString()) BeeApp().app.postEvent(self.master,event) elif self.type==BeeSocketTypes.python: try: self.server=customPyServer(("localhost",self.port),self.master,self.parentthread) except: self.server=None 
event=HiveServerStatusEvent(HiveServerStatusTypes.starterror) BeeApp().app.postEvent(self.master,event) print_debug("WARNING: failed to create server") if self.server: event=HiveServerStatusEvent(HiveServerStatusTypes.running) BeeApp().app.postEvent(self.master,event) self.server.serve_forever() def stop(self): if self.server: if self.type==BeeSocketTypes.qt: self.server.close() elif self.type==BeeSocketTypes.python: self.server.shutdown() self.server.socket.close() def newConnectionQt(self): print_debug("found new connection") while self.server.hasPendingConnections(): newsock=BeeSocket(BeeSocketTypes.qt,self.server.nextPendingConnection()) # start the listener, that will authenticate client and finish setup newlistener=HiveClientListener(self.parentthread,newsock,self.master,self.getNextId()) # push responsibility to new thread newsock.socket.setParent(None) newsock.socket.moveToThread(newlistener) newlistener.start() class BeeSocket: """ Socket interface to allow changing between different socket implementations (Qt socket or standard python sockets). Qt sockets seem to be buggy under linux, while python sockets aren't as well implemented under windows. Also helps provide blocking interface to Qt sockets which are normally non-blocking. """ def __init__(self,type,socket,connected=False): self.type=type self.socket=socket self.errorStr="" self.connected=connected self.pyconnectlock=qtcore.QReadWriteLock() # set blocking to never time out if self.type==BeeSocketTypes.python: self.socket.settimeout(None) def setPyConnectedState(self,state,lock=None): if not lock: lock=qtcore.QWriteLocker(self.pyconnectlock) self.connected=state def waitForConnected(self): if self.type==BeeSocketTypes.qt: connected=self.socket.waitForConnected() return connected elif self.type==BeeSocketTypes.python: return self.isConnected() def errorString(self): if self.type==BeeSocketTypes.qt: return self.socket.errorString() elif self.type==BeeSocketTypes.python: return self.errorStr def disconnect(self): if self.type==BeeSocketTypes.qt: if not self.isConnected(): return self.socket.disconnectFromHost() elif self.type==BeeSocketTypes.python: lock=qtcore.QWriteLocker(self.pyconnectlock) if not self.isConnected(lock): return self.socket.shutdown(socket.SHUT_RDWR) self.socket.close() self.setPyConnectedState(False,lock) def abort(self): if self.type==BeeSocketTypes.qt: self.socket.abort() def connect(self,host,port): if self.type==BeeSocketTypes.qt: self.socket.connectToHost(host,port) return self.socket.waitForConnected() elif self.type==BeeSocketTypes.python: try: self.socket.connect((host,port)) self.setPyConnectedState(True) except socket.error, errmsg: print_debug("error while connecting: %s" % errmsg) self.setPyConnectedState(False) except: self.errorStr="unknown connection error" self.setPyConnectedState(False) return self.isConnected() def read(self,size): retstring="" if self.type==BeeSocketTypes.qt: # only wait if there isn't data already available if not self.socket.bytesAvailable(): status=self.socket.waitForReadyRead(-1) data=self.socket.read(size) if data: retstring="%s" % qtcore.QString(data) elif self.type==BeeSocketTypes.python: try: retstring=self.socket.recv(size) except socket.error, errmsg: print_debug("exception while trying to read data: %s" % errmsg) self.setPyConnectedState(False) retstring="" except: print_debug("unknown error while trying to read data") self.setPyConnectedState(False) retstring="" return retstring def isConnected(self,lock=None): if self.type==BeeSocketTypes.qt: if 
self.socket.state()==qtnet.QAbstractSocket.UnconnectedState: return False else: return True elif self.type==BeeSocketTypes.python: if not lock: lock=qtcore.QReadLocker(self.pyconnectlock) return self.connected def write(self,data): if not data: return if self.type==BeeSocketTypes.qt: self.socket.write(data) self.socket.flush() if self.socket.state()!=qtnet.QTcpSocket.UnconnectedState: self.socket.waitForBytesWritten(-1) elif self.type==BeeSocketTypes.python: try: self.socket.sendall(data) except socket.error, errmsg: print_debug("exception while trying to send data: %s" % errmsg) self.setPyConnectedState(False) except: print_debug("unknown exception while trying to send data") self.setPyConnectedState(False) # thread to setup connection, authenticate and then # listen to a socket and add incomming client commands to queue class HiveClientListener(qtcore.QThread): def __init__(self,parent,socket,master,id): qtcore.QThread.__init__(self,parent) self.socket=socket self.master=master self.id=id self.authenticationerror="Unknown Error" def authenticate(self): # attempt to read stream of data, which should include version, username and password # make sure someone dosen't overload the buffer while wating for authentication info authstring=qtcore.QString() while authstring.count('\n')<3 and len(authstring)<512: data=self.socket.read(512) if data: authstring.append(data) # if error exit else: self.authenticationerror="Error: Lost connection during authentication request" return False authlist=authstring.split('\n') # if loop ended without getting enough separators just return false if len(authlist)<3: self.authenticationerror="Error parsing authentication information" return False self.username=authlist[0] password=authlist[1] try: version=int(authlist[2]) except ValueError: self.authenticationerror="Error parsing authentication information" return False if version != PROTOCOL_VERSION: self.authenticationerror="Protocol version mismatch, please change to server version: %d" % PROTOCOL_VERSION return False masterpass=self.master.getPassword() # if password is blank, let authentication pass if masterpass=="": return True # otherwise trim off whitespace and compare to password string if password.trimmed().toAscii()==masterpass: return True self.authenticationerror="Incorrect Password" return False def register(self): # register this new connection self.master.registerReaderThread(self.id,self) return self.master.registerClient(self.username,self.id,self.socket) def disconnected(self): print_debug("disconnecting client with ID: %d" % self.id) self.master.unregisterClient(self.id) def readyRead(self): data=self.socket.read(readybytes) #print_debug("got animation data from socket: %s" % qtcore.QString(data)) self.parser.xml.addData(data) error=self.parser.read() self.socket.waitForBytesWritten() if error!=QXmlStreamReader.PrematureEndOfDocumentError and error!=QXmlStreamReader.NoError: return error return None def run(self): # try to authticate user if not self.authenticate(): # if authentication fails send close socket and exit print_debug("authentication failed") self.socket.write(qtcore.QByteArray("Authtication failed\n%s\n" % self.authenticationerror)) self.socket.disconnect() return print_debug("authentication succeded") if not self.register(): print_debug("Registration with server failed, probably due to duplicate username") self.socket.write(qtcore.QByteArray("Registration Failed\nRegistration with server failed, the username you chose is already in use already, try a different one\n")) 
self.socket.disconnect() return self.parser=XmlToQueueEventsConverter(None,self.master.curwindow,0,type=ThreadTypes.server,id=self.id) # pass initial data to client here self.socket.write(qtcore.QByteArray("Success\nConnected To Server\n")) # wait for client to respond so it doesn't get confused and mangle the setup data with the start of the XML file data=self.socket.read(1024) #qtcore.QObject.connect(self.socket, qtcore.SIGNAL("readyRead()"), self.readyRead) #qtcore.QObject.connect(self.socket, qtcore.SIGNAL("disconnected()"), self.disconnected) # start writing thread newwriter=HiveClientWriter(self,self.socket,self.master,self.id) newwriter.start() while 1: if not data: print_debug("remote socket closed") break #print_debug("got animation data from socket: %s" % qtcore.QString(data)) self.parser.xml.addData(data) error=self.parser.read() if error!=QXmlStreamReader.PrematureEndOfDocumentError and error!=QXmlStreamReader.NoError: # queue up command for client to be disconnected break if not self.socket.isConnected(): print_debug("found that socket isn't connected") break data=self.socket.read(1024) # this should be run when the socket is disconnected self.disconnected() # this thread will write to a specific client class HiveClientWriter(qtcore.QThread): def __init__(self,parent,socket,master,id): qtcore.QThread.__init__(self) self.setParent(self) self.socket=socket self.master=master self.id=id self.master.registerWriterThread(id,self) self.buffer=qtcore.QBuffer() self.buffer.open(qtcore.QIODevice.ReadWrite) # add to list of writing threads lock=qtcore.QReadLocker(self.master.clientslistmutex) self.queue=self.master.clientwriterqueues[id] # create custom QXmlStreamWriter #self.xmlgenerator=SketchLogWriter(self.socket) self.xmlgenerator=SketchLogWriter(self.buffer) #print "attempting to connect signal" #self.connect(self.queue,qtcore.SIGNAL("datainqueue()"),self,qtcore.SIGNAL("datainqueue()")) #print "attempted to connect signal" def run(self): while 1: if not self.socket.isConnected(): self.master.unregisterClient(self.id) return #print "Hive Client Writer is ready to read from queue:", self.queue # block until item is available from thread safe queue data=self.queue.get() if data[0]==DrawingCommandTypes.quit: self.master.unregisterClient(self.id) print_debug("exiting client writer thread") return #print "Hive Client Writer got command from Queue:", data # write xml data to socket self.xmlgenerator.logCommand(data) datastr="%s" % qtcore.QString(self.buffer.data()) #print_debug("client writer wrote to sending buffer: %s" % datastr) self.socket.write(datastr) self.buffer.buffer().resize(0) self.buffer.seek(0)
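
# A minimal connection sketch for the BeeSocket wrapper above, using the
# plain-python backend; the host and port are placeholders, so it is left
# as a comment rather than executable module-level code.
#
#   import socket
#   raw = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
#   bee = BeeSocket(BeeSocketTypes.python, raw)
#   if bee.connect("localhost", 8333):
#       bee.write("hello\n")
#       reply = bee.read(1024)
#       bee.disconnect()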
skitzycat/beedraw
beenetwork.py
Python
gpl-2.0
14,234
0.039834
#!/usr/bin/python # encoding: utf-8 import subprocess from config import Config applescript_name_tem = "osascript/open_%s.scpt" arg_tem = { "calendar": "%s %s %s", "fantastical": "%s-%s-%s", "busycal": "%s-%s-%s", "google": "%s%s%s" } SOFTWARE = 'software' def open_cal(arg): arg = arg.strip() if arg.endswith(".json"): open_file(arg) else: from workflow import Workflow wf = Workflow() default_software = Config('').load_default(SOFTWARE) software_name = wf.settings.get(SOFTWARE, default_software) file_name = applescript_name_tem % (software_name) year, month, day = arg.split() script_arg = arg_tem[software_name] % (year, month.zfill(2), day.zfill(2)) execute_osascript(file_name, script_arg) def execute_osascript(file, arg): subprocess.call(['osascript', file, arg]) def open_file(file): subprocess.call(['open', file]) if __name__ == "__main__": import sys open_cal(' '.join(sys.argv[1:]))
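
# Two invocation sketches (the date and file name are placeholders): a
# "year month day" string launches the configured calendar app through its
# AppleScript, while a *.json path is handed straight to `open`.
#
#   open_cal("2015 3 7")        # opens March 7, 2015 in the chosen app
#   open_cal("settings.json")   # delegates to `open settings.json`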
owenwater/alfred-cal
src/open.py
Python
mit
1,042
0.003839
# -*- coding: utf-8 -*- from behave import given, then from nose.tools import assert_true, assert_regexp_matches import pexpect import re @then(u'I should see how many entries were found') def see_number_of_entries_found(context): expected_text = 'Found total number of \d+ entries.' context.trove.expect(expected_text) output = context.trove.match.string.strip() regexp = re.compile(expected_text) assert_regexp_matches(output, regexp) @then(u'the trove prompt should be shown') @given(u'the trove prompt has been shown') def see_trove_prompt(context): expected_text = '(trove)' context.trove.expect(expected_text) output = context.trove.match.string.strip() regexp = re.compile(expected_text) assert_regexp_matches(output, regexp) @given(u'trove is started with an empty --file option') def trove_starts_with_empty_file_option(context): trove = pexpect.spawn("python trove.py --file") context.trove = trove assert_true(trove.isalive()) @then(u'I should see the "--file missing argument" error message') def see_file_missing_argument_error_message(context): expected_text = 'error: argument --file: expected 1 argument' context.trove.expect(expected_text) output = context.trove.match.string.strip() regexp = re.compile(expected_text) assert_regexp_matches(output, regexp) # vim: expandtab shiftwidth=4 softtabstop=4
purepitch/trove
features/steps/file_command_line_option.py
Python
gpl-3.0
1,396
0.003582
# Django settings for imageuploads project. import os PROJECT_DIR = os.path.dirname(__file__) DEBUG = True TEMPLATE_DEBUG = DEBUG ADMINS = ( # ('Your Name', 'your_email@example.com'), ) MANAGERS = ADMINS DATABASES = { 'default': { 'ENGINE': 'django.db.backends.', # Add 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'. 'NAME': '', # Or path to database file if using sqlite3. # The following settings are not used with sqlite3: 'USER': '', 'PASSWORD': '', 'HOST': '', # Empty for localhost through domain sockets or '127.0.0.1' for localhost through TCP. 'PORT': '', # Set to empty string for default. } } # Hosts/domain names that are valid for this site; required if DEBUG is False # See https://docs.djangoproject.com/en/1.5/ref/settings/#allowed-hosts ALLOWED_HOSTS = [] # Local time zone for this installation. Choices can be found here: # http://en.wikipedia.org/wiki/List_of_tz_zones_by_name # although not all choices may be available on all operating systems. # In a Windows environment this must be set to your system time zone. TIME_ZONE = 'America/Chicago' # Language code for this installation. All choices can be found here: # http://www.i18nguy.com/unicode/language-identifiers.html LANGUAGE_CODE = 'en-us' SITE_ID = 1 # If you set this to False, Django will make some optimizations so as not # to load the internationalization machinery. USE_I18N = True # If you set this to False, Django will not format dates, numbers and # calendars according to the current locale. USE_L10N = True # If you set this to False, Django will not use timezone-aware datetimes. USE_TZ = True MEDIA_ROOT = os.path.join(PROJECT_DIR, "media") STATIC_ROOT = os.path.join(PROJECT_DIR, "static") MEDIA_URL = "/media/" STATIC_URL = "/static/" # Additional locations of static files STATICFILES_DIRS = ( os.path.join(PROJECT_DIR, "site_static"), ) # List of finder classes that know how to find static files in # various locations. STATICFILES_FINDERS = ( 'django.contrib.staticfiles.finders.FileSystemFinder', 'django.contrib.staticfiles.finders.AppDirectoriesFinder', # 'django.contrib.staticfiles.finders.DefaultStorageFinder', ) # Make this unique, and don't share it with anybody. SECRET_KEY = 'qomeppi59pg-(^lh7o@seb!-9d(yr@5n^=*y9w&(=!yd2p7&e^' # List of callables that know how to import templates from various sources. TEMPLATE_LOADERS = ( 'django.template.loaders.filesystem.Loader', 'django.template.loaders.app_directories.Loader', # 'django.template.loaders.eggs.Loader', ) MIDDLEWARE_CLASSES = ( 'django.middleware.common.CommonMiddleware', 'django.contrib.sessions.middleware.SessionMiddleware', 'django.middleware.csrf.CsrfViewMiddleware', 'django.contrib.auth.middleware.AuthenticationMiddleware', 'django.contrib.messages.middleware.MessageMiddleware', # Uncomment the next line for simple clickjacking protection: # 'django.middleware.clickjacking.XFrameOptionsMiddleware', ) ROOT_URLCONF = 'imageuploads.urls' # Python dotted path to the WSGI application used by Django's runserver. WSGI_APPLICATION = 'imageuploads.wsgi.application' TEMPLATE_DIRS = ( os.path.join(PROJECT_DIR, "templates"), ) INSTALLED_APPS = ( 'django.contrib.auth', 'django.contrib.contenttypes', 'django.contrib.sessions', 'django.contrib.sites', 'django.contrib.messages', 'django.contrib.staticfiles', 'south', 'crispy_forms', 'ajaxuploader', 'images', ) SESSION_SERIALIZER = 'django.contrib.sessions.serializers.JSONSerializer' # A sample logging configuration. 
The only tangible logging # performed by this configuration is to send an email to # the site admins on every HTTP 500 error when DEBUG=False. # See http://docs.djangoproject.com/en/dev/topics/logging for # more details on how to customize your logging configuration. LOGGING = { 'version': 1, 'disable_existing_loggers': False, 'filters': { 'require_debug_false': { '()': 'django.utils.log.RequireDebugFalse' } }, 'handlers': { 'mail_admins': { 'level': 'ERROR', 'filters': ['require_debug_false'], 'class': 'django.utils.log.AdminEmailHandler' } }, 'loggers': { 'django.request': { 'handlers': ['mail_admins'], 'level': 'ERROR', 'propagate': True, }, } } CRISPY_TEMPLATE_PACK = 'bootstrap3' try: execfile(os.path.join(os.path.dirname(__file__), "local_settings.py")) except IOError: pass
archatas/imageuploads
imageuploads/settings.py
Python
gpl-2.0
4,670
0.001285
# ------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for # license information. # -------------------------------------------------------------------------- # pylint: disable=no-self-use from azure.core.exceptions import HttpResponseError from .._deserialize import ( process_storage_error) from .._shared.response_handlers import return_response_headers from .._shared.uploads_async import ( upload_data_chunks, DataLakeFileChunkUploader, upload_substream_blocks) def _any_conditions(modified_access_conditions=None, **kwargs): # pylint: disable=unused-argument return any([ modified_access_conditions.if_modified_since, modified_access_conditions.if_unmodified_since, modified_access_conditions.if_none_match, modified_access_conditions.if_match ]) async def upload_datalake_file( # pylint: disable=unused-argument client=None, stream=None, length=None, overwrite=None, validate_content=None, max_concurrency=None, file_settings=None, **kwargs): try: if length == 0: return {} properties = kwargs.pop('properties', None) umask = kwargs.pop('umask', None) permissions = kwargs.pop('permissions', None) path_http_headers = kwargs.pop('path_http_headers', None) modified_access_conditions = kwargs.pop('modified_access_conditions', None) chunk_size = kwargs.pop('chunk_size', 100 * 1024 * 1024) if not overwrite: # if customers didn't specify access conditions, they cannot flush data to existing file if not _any_conditions(modified_access_conditions): modified_access_conditions.if_none_match = '*' if properties or umask or permissions: raise ValueError("metadata, umask and permissions can be set only when overwrite is enabled") if overwrite: response = await client.create( resource='file', path_http_headers=path_http_headers, properties=properties, modified_access_conditions=modified_access_conditions, umask=umask, permissions=permissions, cls=return_response_headers, **kwargs) # this modified_access_conditions will be applied to flush_data to make sure # no other flush between create and the current flush modified_access_conditions.if_match = response['etag'] modified_access_conditions.if_none_match = None modified_access_conditions.if_modified_since = None modified_access_conditions.if_unmodified_since = None use_original_upload_path = file_settings.use_byte_buffer or \ validate_content or chunk_size < file_settings.min_large_chunk_upload_threshold or \ hasattr(stream, 'seekable') and not stream.seekable() or \ not hasattr(stream, 'seek') or not hasattr(stream, 'tell') if use_original_upload_path: await upload_data_chunks( service=client, uploader_class=DataLakeFileChunkUploader, total_size=length, chunk_size=chunk_size, stream=stream, max_concurrency=max_concurrency, validate_content=validate_content, **kwargs) else: await upload_substream_blocks( service=client, uploader_class=DataLakeFileChunkUploader, total_size=length, chunk_size=chunk_size, max_concurrency=max_concurrency, stream=stream, validate_content=validate_content, **kwargs ) return await client.flush_data(position=length, path_http_headers=path_http_headers, modified_access_conditions=modified_access_conditions, close=True, cls=return_response_headers, **kwargs) except HttpResponseError as error: process_storage_error(error)
Azure/azure-sdk-for-python
sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/aio/_upload_helper.py
Python
mit
4,443
0.001576
# Lior Hirschfeld # JargonBot # -- Imports -- import re import pickle import random import praw from custombot import RedditBot from time import sleep from define import getDefinition from collections import Counter from nltk.stem import * from sklearn import linear_model # -- Setup Variables -- jargonBot = RedditBot('jargonBot') stemmer = PorterStemmer() with open('count.txt', 'r') as handle: count = [line.split()[0] for line in handle.readlines()] countStemmed = [stemmer.stem(word) for word in count] with open('languages.pickle', 'rb') as handle: languages = pickle.load(handle) # -- Methods -- def jargon(lim, rate, subs, ml=False): searchReddit(lim, rate, subs, ml) # Search Reddit for words that need to be defined, and define them. def searchReddit(lim, rate, subs, ml): for sub in subs: searchSub(sub, lim, ml) jargonBot.updateIds() if ml: jargonBot.updateModels(["popularity", "wLength", "cLength"]) sleep(rate) # Search a sub for words that need to be defined, and define them. def searchSub(sub, lim, ml): if sub not in languages: analyze(sub) subreddit = jargonBot.r.subreddit(sub) subWords = [pair[0] for pair in languages[sub].most_common(10000)] for submission in subreddit.hot(limit=lim): comment_queue = submission.comments[:] while comment_queue: com = comment_queue.pop(0) if not hasattr(com, 'body') or com.id in jargonBot.ids: continue for word in com.body.split(): # Stem the word and check if it is rare enough to be defined. # Find the most similar word in count to the stemmed word. word = stemmer.stem(word) if "'" in word: continue if word not in subWords: for item in countStemmed: if item == word: word = item break if ml: if sub not in jargonBot.models: jargonBot.createModel(sub, [[[1000000, 10, 10]], [10]]) # If ML, after basic checks, predict using the model # to decide whether to reply. if word in count: popularity = count.index(word) else: popularity = 1000000 info = {"popularity": popularity, "wLength": len(word), "cLength": len(com.body), "cID": com.id, "sID": submission.id, "sub": sub} if popularity > 10000: # Sometimes, randomly reply to train the model. if random.random() < jargonBot.models[sub][1]: reply(com, word, ml, info=info) elif jargonBot.models[sub][0].predict([[info["popularity"], info["wLength"], info["cLength"]]]) > 0: reply(com, word, ml, info=info) break else: if word not in count[:400000]: reply(com, word, ml) break jargonBot.ids.append(com.id) comment_queue.extend(com.replies) # Reply to a comment with a word definition. def reply(com, word, ml, info=None): reply = "" # Get the definition of the word (if it exists) result = getDefinition(word) if result != None: # A definition has been found. if result[1] != "": reply += """Definition of **{}**: {}.\n\n>*{}.*""".format(word.lower(), result[0].capitalize(), result[1].capitalize()) else: reply += """Definition of **{}**: {}.""".format(word.lower(), result[0].capitalize()) if ml: reply += """\n\nI am a bot which attempts to define difficult words automatically. I use machine learning to do this, and I can use your feedback to improve. Feel free to leave a comment to let me know what you thought of this definition!""" reply += "\n\n---------\n\n^Check ^out ^my ^[code](https://github.com/lhirschfeld/JargonBot). " reply += " ^Please ^contact ^/u/liortulip ^with" reply += " ^any ^questions ^or ^concerns." 
    try:
        cID = com.reply(reply)
        if ml:
            # local import: datetime is not imported in the module header
            from datetime import datetime
            info["time"] = datetime.now()
            info["cID"] = cID
            jargonBot.responses.append(info)
        print("Replied")
    except praw.exceptions.APIException as error:
        print("Hit rate limit error.")
        jargonBot.updateIds()
        sleep(600)

# Analyze the language of a particular sub.
def analyze(sub):
    print("Analyzing:", sub)
    subreddit = jargonBot.r.subreddit(sub)
    words = Counter()
    for submission in subreddit.hot(limit=300):
        comment_queue = submission.comments[:]
        while comment_queue:
            com = comment_queue.pop(0)
            if hasattr(com, 'body'):
                for word in com.body.split():
                    # Stem the word and add it to the counter.
                    word = stemmer.stem(word)
                    words[word] += 1

    languages[sub] = words
    with open('languages.pickle', 'wb') as handle:
        pickle.dump(languages, handle, protocol=pickle.HIGHEST_PROTOCOL)
    print("Analysis complete.")

while True:
    jargon(50, 10, ["science", "math", "askreddit"])
    jargon(50, 10, ["science", "math", "askreddit"], ml=True)
    print("Completed loop")
lhirschfeld/JargonBot
jargonbot.py
Python
mit
5,805
0.002756
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
# pylint: disable=not-callable
# pylint: disable=redefined-builtin
"""Layers can merge several input tensors into a single output tensor.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

from tensorflow.contrib.keras.python.keras import backend as K
from tensorflow.contrib.keras.python.keras.engine.topology import Layer
from tensorflow.python.framework import tensor_shape


class _Merge(Layer):
  """Generic merge layer for elementwise merge functions.

  Used to implement `Sum`, `Average`, etc.

  Arguments:
      **kwargs: standard layer keyword arguments.
  """

  def __init__(self, **kwargs):
    super(_Merge, self).__init__(**kwargs)
    self.supports_masking = True

  def _merge_function(self, inputs):
    raise NotImplementedError

  def _compute_elemwise_op_output_shape(self, shape1, shape2):
    """Computes the shape of the resultant of an elementwise operation.

    Arguments:
        shape1: tuple or None. Shape of the first tensor
        shape2: tuple or None. Shape of the second tensor

    Returns:
        expected output shape when an element-wise operation is
        carried out on 2 tensors with shapes shape1 and shape2.
        tuple or None.

    Raises:
        ValueError: if shape1 and shape2 are not compatible for
            element-wise operations.
    """
    if None in [shape1, shape2]:
      return None
    elif len(shape1) < len(shape2):
      return self._compute_elemwise_op_output_shape(shape2, shape1)
    elif not shape2:
      return shape1
    output_shape = list(shape1[:-len(shape2)])
    for i, j in zip(shape1[-len(shape2):], shape2):
      if i is None or j is None:
        output_shape.append(None)
      elif i == 1:
        output_shape.append(j)
      elif j == 1:
        output_shape.append(i)
      else:
        if i != j:
          raise ValueError('Operands could not be broadcast '
                           'together with shapes ' + str(shape1) + ' ' +
                           str(shape2))
        output_shape.append(i)
    return tuple(output_shape)

  def build(self, input_shape):
    # Used purely for shape validation.
    if not isinstance(input_shape, list):
      raise ValueError('A merge layer should be called '
                       'on a list of inputs.')
    if len(input_shape) < 2:
      raise ValueError('A merge layer should be called '
                       'on a list of at least 2 inputs. '
                       'Got ' + str(len(input_shape)) + ' inputs.')
    input_shape = [tensor_shape.TensorShape(s).as_list() for s in input_shape]
    batch_sizes = [s[0] for s in input_shape if s is not None]
    batch_sizes = set(batch_sizes)
    batch_sizes -= set([None])
    if len(batch_sizes) > 1:
      raise ValueError('Can not merge tensors with different '
                       'batch sizes. Got tensors with shapes : ' +
                       str(input_shape))
    if input_shape[0] is None:
      output_shape = None
    else:
      output_shape = input_shape[0][1:]
    for i in range(1, len(input_shape)):
      if input_shape[i] is None:
        shape = None
      else:
        shape = input_shape[i][1:]
      output_shape = self._compute_elemwise_op_output_shape(output_shape,
                                                            shape)
    # If the inputs have different ranks, we have to reshape them
    # to make them broadcastable.
    if None not in input_shape and len(set(map(len, input_shape))) == 1:
      self._reshape_required = False
    else:
      self._reshape_required = True
    self.built = True

  def call(self, inputs):
    if self._reshape_required:
      reshaped_inputs = []
      input_ndims = list(map(K.ndim, inputs))
      if None not in input_ndims:
        # If ranks of all inputs are available,
        # we simply expand each of them at axis=1
        # until all of them have the same rank.
        max_ndim = max(input_ndims)
        for x in inputs:
          x_ndim = K.ndim(x)
          for _ in range(max_ndim - x_ndim):
            x = K.expand_dims(x, 1)
          reshaped_inputs.append(x)
        return self._merge_function(reshaped_inputs)
      else:
        # Transpose all inputs so that batch size is the last dimension.
        # (batch_size, dim1, dim2, ... ) -> (dim1, dim2, ... , batch_size)
        transposed = False
        for x in inputs:
          x_ndim = K.ndim(x)
          if x_ndim is None:
            x_shape = K.shape(x)
            batch_size = x_shape[0]
            new_shape = K.concatenate([x_shape[1:], K.expand_dims(batch_size)])
            x_transposed = K.reshape(x,
                                     K.stack([batch_size,
                                              K.prod(x_shape[1:])]))
            x_transposed = K.permute_dimensions(x_transposed, (1, 0))
            x_transposed = K.reshape(x_transposed, new_shape)
            reshaped_inputs.append(x_transposed)
            transposed = True
          elif x_ndim > 1:
            dims = list(range(1, x_ndim)) + [0]
            reshaped_inputs.append(K.permute_dimensions(x, dims))
            transposed = True
          else:
            # We don't transpose inputs if they are 1D vectors or scalars.
            reshaped_inputs.append(x)
        y = self._merge_function(reshaped_inputs)
        y_ndim = K.ndim(y)
        if transposed:
          # If inputs have been transposed, we have to transpose the output
          # too.
          if y_ndim is None:
            y_shape = K.shape(y)
            y_ndim = K.shape(y_shape)[0]
            batch_size = y_shape[y_ndim - 1]
            new_shape = K.concatenate(
                [K.expand_dims(batch_size), y_shape[:y_ndim - 1]])
            y = K.reshape(y, (-1, batch_size))
            y = K.permute_dimensions(y, (1, 0))
            y = K.reshape(y, new_shape)
          elif y_ndim > 1:
            dims = [y_ndim - 1] + list(range(y_ndim - 1))
            y = K.permute_dimensions(y, dims)
        return y
    else:
      return self._merge_function(inputs)

  def compute_output_shape(self, input_shape):
    if input_shape[0] is None:
      output_shape = None
    else:
      output_shape = input_shape[0][1:]
    for i in range(1, len(input_shape)):
      if input_shape[i] is None:
        shape = None
      else:
        shape = input_shape[i][1:]
      output_shape = self._compute_elemwise_op_output_shape(output_shape,
                                                            shape)
    batch_sizes = [s[0] for s in input_shape if s is not None]
    batch_sizes = set(batch_sizes)
    batch_sizes -= set([None])
    if len(batch_sizes) == 1:
      output_shape = (list(batch_sizes)[0],) + output_shape
    else:
      output_shape = (None,) + output_shape
    return output_shape

  def compute_mask(self, inputs, mask=None):
    if mask is None:
      return None
    if not isinstance(mask, list):
      raise ValueError('`mask` should be a list.')
    if not isinstance(inputs, list):
      raise ValueError('`inputs` should be a list.')
    if len(mask) != len(inputs):
      raise ValueError('The lists `inputs` and `mask` '
                       'should have the same length.')
    if all([m is None for m in mask]):
      return None
    masks = [K.expand_dims(m, 0) for m in mask if m is not None]
    return K.all(K.concatenate(masks, axis=0), axis=0, keepdims=False)


class Add(_Merge):
  """Layer that adds a list of inputs.

  It takes as input a list of tensors,
  all of the same shape, and returns
  a single tensor (also of the same shape).
  """

  def _merge_function(self, inputs):
    output = inputs[0]
    for i in range(1, len(inputs)):
      output += inputs[i]
    return output


class Multiply(_Merge):
  """Layer that multiplies (element-wise) a list of inputs.

  It takes as input a list of tensors,
  all of the same shape, and returns
  a single tensor (also of the same shape).
  """

  def _merge_function(self, inputs):
    output = inputs[0]
    for i in range(1, len(inputs)):
      output *= inputs[i]
    return output


class Average(_Merge):
  """Layer that averages a list of inputs.

  It takes as input a list of tensors,
  all of the same shape, and returns
  a single tensor (also of the same shape).
  """

  def _merge_function(self, inputs):
    output = inputs[0]
    for i in range(1, len(inputs)):
      output += inputs[i]
    return output / len(inputs)


class Maximum(_Merge):
  """Layer that computes the maximum (element-wise) of a list of inputs.

  It takes as input a list of tensors,
  all of the same shape, and returns
  a single tensor (also of the same shape).
  """

  def _merge_function(self, inputs):
    output = inputs[0]
    for i in range(1, len(inputs)):
      output = K.maximum(output, inputs[i])
    return output


class Concatenate(_Merge):
  """Layer that concatenates a list of inputs.

  It takes as input a list of tensors,
  all of the same shape except for the concatenation axis,
  and returns a single tensor, the concatenation of all inputs.

  Arguments:
      axis: Axis along which to concatenate.
      **kwargs: standard layer keyword arguments.
  """

  def __init__(self, axis=-1, **kwargs):
    super(Concatenate, self).__init__(**kwargs)
    self.axis = axis
    self.supports_masking = True

  def build(self, input_shape):
    # Used purely for shape validation.
    if not isinstance(input_shape, list):
      raise ValueError('`Concatenate` layer should be called '
                       'on a list of inputs')
    if all([shape is None for shape in input_shape]):
      return
    reduced_inputs_shapes = [
        tensor_shape.TensorShape(shape).as_list() for shape in input_shape
    ]
    shape_set = set()
    for i in range(len(reduced_inputs_shapes)):
      del reduced_inputs_shapes[i][self.axis]
      shape_set.add(tuple(reduced_inputs_shapes[i]))
    if len(shape_set) > 1:
      raise ValueError('`Concatenate` layer requires '
                       'inputs with matching shapes '
                       'except for the concat axis. '
                       'Got inputs shapes: %s' % (input_shape))
    self.built = True

  def call(self, inputs):
    if not isinstance(inputs, list):
      raise ValueError('A `Concatenate` layer should be called '
                       'on a list of inputs.')
    return K.concatenate(inputs, axis=self.axis)

  def _compute_output_shape(self, input_shape):
    if not isinstance(input_shape, list):
      raise ValueError('A `Concatenate` layer should be called '
                       'on a list of inputs.')
    input_shapes = input_shape
    output_shape = tensor_shape.TensorShape(input_shapes[0]).as_list()
    for shape in input_shapes[1:]:
      shape = tensor_shape.TensorShape(shape).as_list()
      if output_shape[self.axis] is None or shape[self.axis] is None:
        output_shape[self.axis] = None
        break
      output_shape[self.axis] += shape[self.axis]
    return tensor_shape.TensorShape(output_shape)

  def compute_mask(self, inputs, mask=None):
    if mask is None:
      return None
    if not isinstance(mask, list):
      raise ValueError('`mask` should be a list.')
    if not isinstance(inputs, list):
      raise ValueError('`inputs` should be a list.')
    if len(mask) != len(inputs):
      raise ValueError('The lists `inputs` and `mask` '
                       'should have the same length.')
    if all([m is None for m in mask]):
      return None
    # Make a list of masks while making sure
    # the dimensionality of each mask
    # is the same as the corresponding input.
    masks = []
    for input_i, mask_i in zip(inputs, mask):
      if mask_i is None:
        # Input is unmasked. Append all 1s to masks,
        # but cast it to bool first
        masks.append(K.cast(K.ones_like(input_i), 'bool'))
      elif K.ndim(mask_i) < K.ndim(input_i):
        # Mask is smaller than the input, expand it
        masks.append(K.expand_dims(mask_i))
      else:
        masks.append(mask_i)
    concatenated = K.concatenate(masks, axis=self.axis)
    return K.all(concatenated, axis=-1, keepdims=False)

  def get_config(self):
    config = {
        'axis': self.axis,
    }
    base_config = super(Concatenate, self).get_config()
    return dict(list(base_config.items()) + list(config.items()))


class Dot(_Merge):
  """Layer that computes a dot product between samples in two tensors.

  E.g. if applied to two tensors `a` and `b` of shape `(batch_size, n)`,
  the output will be a tensor of shape `(batch_size, 1)`
  where each entry `i` will be the dot product between
  `a[i]` and `b[i]`.

  Arguments:
      axes: Integer or tuple of integers,
          axis or axes along which to take the dot product.
      normalize: Whether to L2-normalize samples along the
          dot product axis before taking the dot product.
          If set to True, then the output of the dot product
          is the cosine proximity between the two samples.
      **kwargs: Standard layer keyword arguments.
  """

  def __init__(self, axes, normalize=False, **kwargs):
    super(Dot, self).__init__(**kwargs)
    if not isinstance(axes, int):
      if not isinstance(axes, (list, tuple)):
        raise TypeError('Invalid type for `axes` - '
                        'should be a list or an int.')
      if len(axes) != 2:
        raise ValueError('Invalid format for `axes` - '
                         'should contain two elements.')
      if not isinstance(axes[0], int) or not isinstance(axes[1], int):
        raise ValueError('Invalid format for `axes` - '
                         'list elements should be "int".')
    self.axes = axes
    self.normalize = normalize
    self.supports_masking = True

  def build(self, input_shape):
    # Used purely for shape validation.
    if not isinstance(input_shape, list) or len(input_shape) != 2:
      raise ValueError('A `Dot` layer should be called '
                       'on a list of 2 inputs.')
    shape1 = tensor_shape.TensorShape(input_shape[0]).as_list()
    shape2 = tensor_shape.TensorShape(input_shape[1]).as_list()
    if shape1 is None or shape2 is None:
      return
    if isinstance(self.axes, int):
      if self.axes < 0:
        axes = [self.axes % len(shape1), self.axes % len(shape2)]
      else:
        axes = [self.axes] * 2
    else:
      axes = self.axes
    if shape1[axes[0]] != shape2[axes[1]]:
      raise ValueError('Dimension incompatibility '
                       '%s != %s. ' % (shape1[axes[0]], shape2[axes[1]]) +
                       'Layer shapes: %s, %s' % (shape1, shape2))
    self.built = True

  def call(self, inputs):
    x1 = inputs[0]
    x2 = inputs[1]
    if isinstance(self.axes, int):
      if self.axes < 0:
        axes = [self.axes % K.ndim(x1), self.axes % K.ndim(x2)]
      else:
        axes = [self.axes] * 2
    else:
      axes = []
      for i in range(len(self.axes)):
        if self.axes[i] < 0:
          axes.append(self.axes[i] % K.ndim(inputs[i]))
        else:
          axes.append(self.axes[i])
    if self.normalize:
      x1 = K.l2_normalize(x1, axis=axes[0])
      x2 = K.l2_normalize(x2, axis=axes[1])
    output = K.batch_dot(x1, x2, axes)
    return output

  def _compute_output_shape(self, input_shape):
    if not isinstance(input_shape, list) or len(input_shape) != 2:
      raise ValueError('A `Dot` layer should be called '
                       'on a list of 2 inputs.')
    shape1 = tensor_shape.TensorShape(input_shape[0]).as_list()
    shape2 = tensor_shape.TensorShape(input_shape[1]).as_list()
    if isinstance(self.axes, int):
      if self.axes < 0:
        axes = [self.axes % len(shape1), self.axes % len(shape2)]
      else:
        axes = [self.axes] * 2
    else:
      axes = self.axes
    shape1.pop(axes[0])
    shape2.pop(axes[1])
    shape2.pop(0)
    output_shape = shape1 + shape2
    if len(output_shape) == 1:
      output_shape += [1]
    return tensor_shape.TensorShape(output_shape)

  def compute_mask(self, inputs, mask=None):
    return None

  def get_config(self):
    config = {
        'axes': self.axes,
        'normalize': self.normalize,
    }
    base_config = super(Dot, self).get_config()
    return dict(list(base_config.items()) + list(config.items()))


def add(inputs, **kwargs):
  """Functional interface to the `Add` layer.

  Arguments:
      inputs: A list of input tensors (at least 2).
      **kwargs: Standard layer keyword arguments.

  Returns:
      A tensor, the sum of the inputs.
  """
  return Add(**kwargs)(inputs)


def multiply(inputs, **kwargs):
  """Functional interface to the `Multiply` layer.

  Arguments:
      inputs: A list of input tensors (at least 2).
      **kwargs: Standard layer keyword arguments.

  Returns:
      A tensor, the element-wise product of the inputs.
  """
  return Multiply(**kwargs)(inputs)


def average(inputs, **kwargs):
  """Functional interface to the `Average` layer.

  Arguments:
      inputs: A list of input tensors (at least 2).
      **kwargs: Standard layer keyword arguments.

  Returns:
      A tensor, the average of the inputs.
  """
  return Average(**kwargs)(inputs)


def maximum(inputs, **kwargs):
  """Functional interface to the `Maximum` layer.

  Arguments:
      inputs: A list of input tensors (at least 2).
      **kwargs: Standard layer keyword arguments.

  Returns:
      A tensor, the element-wise maximum of the inputs.
  """
  return Maximum(**kwargs)(inputs)


def concatenate(inputs, axis=-1, **kwargs):
  """Functional interface to the `Concatenate` layer.

  Arguments:
      inputs: A list of input tensors (at least 2).
      axis: Concatenation axis.
      **kwargs: Standard layer keyword arguments.

  Returns:
      A tensor, the concatenation of the inputs alongside axis `axis`.
  """
  return Concatenate(axis=axis, **kwargs)(inputs)


def dot(inputs, axes, normalize=False, **kwargs):
  """Functional interface to the `Dot` layer.

  Arguments:
      inputs: A list of input tensors (at least 2).
      axes: Integer or tuple of integers,
          axis or axes along which to take the dot product.
      normalize: Whether to L2-normalize samples along the
          dot product axis before taking the dot product.
          If set to True, then the output of the dot product
          is the cosine proximity between the two samples.
      **kwargs: Standard layer keyword arguments.

  Returns:
      A tensor, the dot product of the samples from the inputs.
  """
  return Dot(axes=axes, normalize=normalize, **kwargs)(inputs)
unnikrishnankgs/va
venv/lib/python3.5/site-packages/tensorflow/contrib/keras/python/keras/layers/merge.py
Python
bsd-2-clause
18,999
0.007158
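A brief usage sketch (not part of the source file above) of the functional merge interfaces it defines. The import paths are assumptions based on this contrib module's package layout; in standalone Keras the same names live under `keras.layers`.

from tensorflow.contrib.keras.python.keras.layers import Input, Dense
from tensorflow.contrib.keras.python.keras.layers.merge import add, concatenate
from tensorflow.contrib.keras.python.keras.models import Model

a = Input(shape=(16,))
b = Input(shape=(16,))
summed = add([a, b])               # element-wise sum, shape (None, 16)
joined = concatenate([a, summed])  # joined on the last axis, shape (None, 32)
out = Dense(1)(joined)
model = Model(inputs=[a, b], outputs=out)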
#!/usr/bin/env python
# Copyright 2021 - Gustavo Montamat
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""
awreporting.py

AdWords API reporting module suitable for large scale reports.
"""

import csv
import logging
import os
import queue
import shutil
import tempfile

from time import sleep

from awreporting.accounts import get_account_ids
from awreporting.reporting_threads import ReportDownloader, ReportDecompressor, END_SIGNAL


def read_query(query_file):
    try:
        with open(query_file, 'r') as fin:
            query = fin.read().replace('\r', '').replace('\n', ' ')
    except Exception as e:
        logging.exception("Could not read query file.")
        return
    return query


def merge_output(output, path):
    first = True
    with open(output, 'w') as fout:
        csv_writer = csv.writer(fout, delimiter=',', quotechar='"')
        for file_name in os.listdir(path):
            if file_name[-4:] == '.csv':
                file_path = os.path.join(path, file_name)
                with open(file_path, 'r') as fin:
                    csv_reader = csv.reader(fin, delimiter=',', quotechar='"')
                    if not first:
                        next(csv_reader, None)  # Skip headers
                    else:
                        first = False
                    for row in csv_reader:
                        csv_writer.writerow(row)


def get_report(token, awql_query, output, threads, account_ids=None):
    if account_ids is None:
        logging.info("Retrieving all AdWords account ids.")
        account_ids = get_account_ids(token)
    if not account_ids:
        logging.error("No account ids were found. Check token.")
        return
    logging.info("Creating temporary directory.")
    temporal_path = tempfile.mkdtemp()
    # Create a queue with all the account ids
    queue_ids = queue.Queue()
    [queue_ids.put(account_id) for account_id in account_ids]
    while True:
        queue_decompress = queue.Queue()
        queue_fails = queue.Queue()
        # Initialize two decompressor threads
        logging.info("Starting ReportDecompressor threads.")
        for i in range(2):
            report_decompressor = ReportDecompressor(
                queue_decompress, queue_fails, temporal_path
            )
            report_decompressor.daemon = True
            report_decompressor.start()
        # Initialize downloader threads pool
        logging.info("Starting ReportDownloader threads.")
        max_threads = min(queue_ids.qsize(), threads)
        for i in range(max_threads):
            if queue_ids.qsize() == 0:
                break
            report_downloader = ReportDownloader(
                token, queue_ids, queue_decompress, awql_query, temporal_path
            )
            report_downloader.daemon = True
            report_downloader.start()
            sleep(0.1)
        logging.info("Used {thread_num} threads.".format(thread_num=i + 1))
        # Wait until all the account ids have been processed
        queue_ids.join()
        queue_ids.put(END_SIGNAL)
        # Wait until all gzipped reports have been extracted
        queue_decompress.join()
        queue_decompress.put(END_SIGNAL)
        if queue_fails.qsize() == 0:
            break
        # Restart job with failed downloads
        queue_ids = queue.Queue()
        [queue_ids.put(account_id) for account_id in queue_fails.get()]
    logging.info("All reports have been obtained.")
    merge_output(output, temporal_path)
    shutil.rmtree(temporal_path)
gmontamat/pyaw-reporting
awreporting/awreporting.py
Python
apache-2.0
4,023
0.000249
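A self-contained sketch of the sentinel-plus-`task_done` queue pattern that `get_report` above builds on. The `END_SIGNAL` value here is a stand-in for the constant imported from `reporting_threads`, and the worker is a simplified placeholder for the downloader/decompressor threads.

import queue
import threading

END_SIGNAL = 'END'  # stand-in for awreporting.reporting_threads.END_SIGNAL

def worker(tasks):
    # Drain the queue until the sentinel arrives.
    while True:
        item = tasks.get()
        if item == END_SIGNAL:
            tasks.task_done()
            break
        print('processing', item)
        tasks.task_done()

tasks = queue.Queue()
for account_id in ['123-456-7890', '098-765-4321']:  # hypothetical ids
    tasks.put(account_id)

t = threading.Thread(target=worker, args=(tasks,), daemon=True)
t.start()
tasks.put(END_SIGNAL)
tasks.join()  # returns once every put() has a matching task_done()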
#!/usr/bin/env python
#
# Released under the BSD license. See LICENSE file for details.
"""
This program basically does face detection and blurs the face out.
"""
print __doc__

from SimpleCV import Camera, Display, HaarCascade

# Initialize the camera
cam = Camera()

# Create the display to show the image
display = Display()

# Haar Cascade face detection, only faces
haarcascade = HaarCascade("face")

# Loop until the display window is closed
while display.isNotDone():
    # Get image, flip it so it looks mirrored, scale to speed things up
    img = cam.getImage().flipHorizontal().scale(0.5)

    # Load in trained face file
    faces = img.findHaarFeatures(haarcascade)

    # Pixelize the detected face
    if faces:
        bb = faces[-1].boundingBox()
        img = img.pixelize(10, region=(bb[0], bb[1], bb[2], bb[3]))

    # Display the image
    img.save(display)
nils-werner/SimpleCV
SimpleCV/examples/detection/facetrack.py
Python
bsd-3-clause
849
0.002356
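Note that the loop above only obscures the last detection (`faces[-1]`). A hedged drop-in replacement for that `if faces:` block, using only calls already present in the example, pixelates every detected face instead:

# Pixelize all detected faces instead of just the last one
if faces:
    for face in faces:
        bb = face.boundingBox()
        img = img.pixelize(10, region=(bb[0], bb[1], bb[2], bb[3]))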
"""SCons.Tool.gcc Tool-specific initialization for gcc. There normally shouldn't be any need to import this module directly. It will usually be imported through the generic SCons.Tool.Tool() selection method. """ # # Copyright (c) 2001 - 2017 The SCons Foundation # # Permission is hereby granted, free of charge, to any person obtaining # a copy of this software and associated documentation files (the # "Software"), to deal in the Software without restriction, including # without limitation the rights to use, copy, modify, merge, publish, # distribute, sublicense, and/or sell copies of the Software, and to # permit persons to whom the Software is furnished to do so, subject to # the following conditions: # # The above copyright notice and this permission notice shall be included # in all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY # KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE # WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND # NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE # LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION # WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. # __revision__ = "src/engine/SCons/Tool/gcc.py 74b2c53bc42290e911b334a6b44f187da698a668 2017/11/14 13:16:53 bdbaddog" from . import cc import os import re import subprocess import SCons.Util compilers = ['gcc', 'cc'] def generate(env): """Add Builders and construction variables for gcc to an Environment.""" if 'CC' not in env: env['CC'] = env.Detect(compilers) or compilers[0] cc.generate(env) if env['PLATFORM'] in ['cygwin', 'win32']: env['SHCCFLAGS'] = SCons.Util.CLVar('$CCFLAGS') else: env['SHCCFLAGS'] = SCons.Util.CLVar('$CCFLAGS -fPIC') # determine compiler version version = detect_version(env, env['CC']) if version: env['CCVERSION'] = version def exists(env): # is executable, and is a GNU compiler (or accepts '--version' at least) return detect_version(env, env.Detect(env.get('CC', compilers))) def detect_version(env, cc): """Return the version of the GNU compiler, or None if it is not a GNU compiler.""" cc = env.subst(cc) if not cc: return None version = None #pipe = SCons.Action._subproc(env, SCons.Util.CLVar(cc) + ['-dumpversion'], pipe = SCons.Action._subproc(env, SCons.Util.CLVar(cc) + ['--version'], stdin = 'devnull', stderr = 'devnull', stdout = subprocess.PIPE) # -dumpversion was added in GCC 3.0. As long as we're supporting # GCC versions older than that, we should use --version and a # regular expression. #line = pipe.stdout.read().strip() #if line: # version = line line = SCons.Util.to_str(pipe.stdout.readline()) match = re.search(r'[0-9]+(\.[0-9]+)+', line) if match: version = match.group(0) # Non-GNU compiler's output (like AIX xlc's) may exceed the stdout buffer: # So continue with reading to let the child process actually terminate. while SCons.Util.to_str(pipe.stdout.readline()): pass ret = pipe.wait() if ret != 0: return None return version # Local Variables: # tab-width:4 # indent-tabs-mode:nil # End: # vim: set expandtab tabstop=4 shiftwidth=4:
mapycz/mapnik
scons/scons-local-3.0.1/SCons/Tool/gcc.py
Python
lgpl-2.1
3,530
0.003966
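A standalone check of the version-extraction regex used by `detect_version` above. The sample banner line is illustrative, shaped like typical `gcc --version` output.

import re

# First line of a typical `gcc --version` banner (illustrative sample).
line = 'gcc (Ubuntu 9.4.0-1ubuntu1~20.04) 9.4.0'
match = re.search(r'[0-9]+(\.[0-9]+)+', line)
print(match.group(0) if match else None)  # -> '9.4.0'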
# -*- coding: utf-8 -*-

###############################################################################
#
# InitializeOAuth
# Generates an authorization URL that an application can use to complete
# the first step in the OAuth process.
#
# Python versions 2.6, 2.7, 3.x
#
# Copyright 2014, Temboo Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
# either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
#
#
###############################################################################

from temboo.core.choreography import Choreography
from temboo.core.choreography import InputSet
from temboo.core.choreography import ResultSet
from temboo.core.choreography import ChoreographyExecution

import json


class InitializeOAuth(Choreography):

    def __init__(self, temboo_session):
        """
        Create a new instance of the InitializeOAuth Choreo. A TembooSession object, containing a valid
        set of Temboo credentials, must be supplied.
        """
        super(InitializeOAuth, self).__init__(temboo_session, '/Library/Bitly/OAuth/InitializeOAuth')

    def new_input_set(self):
        return InitializeOAuthInputSet()

    def _make_result_set(self, result, path):
        return InitializeOAuthResultSet(result, path)

    def _make_execution(self, session, exec_id, path):
        return InitializeOAuthChoreographyExecution(session, exec_id, path)


class InitializeOAuthInputSet(InputSet):
    """
    An InputSet with methods appropriate for specifying the inputs to the InitializeOAuth
    Choreo. The InputSet object is used to specify input parameters when executing this Choreo.
    """
    def set_AccountName(self, value):
        """
        Set the value of the AccountName input for this Choreo. ((optional, string) Deprecated (retained for backward compatibility only).)
        """
        super(InitializeOAuthInputSet, self)._set_input('AccountName', value)

    def set_AppKeyName(self, value):
        """
        Set the value of the AppKeyName input for this Choreo. ((optional, string) Deprecated (retained for backward compatibility only).)
        """
        super(InitializeOAuthInputSet, self)._set_input('AppKeyName', value)

    def set_AppKeyValue(self, value):
        """
        Set the value of the AppKeyValue input for this Choreo. ((optional, string) Deprecated (retained for backward compatibility only).)
        """
        super(InitializeOAuthInputSet, self)._set_input('AppKeyValue', value)

    def set_ClientID(self, value):
        """
        Set the value of the ClientID input for this Choreo. ((required, string) The Client ID provided by Bitly after registering your application.)
        """
        super(InitializeOAuthInputSet, self)._set_input('ClientID', value)

    def set_CustomCallbackID(self, value):
        """
        Set the value of the CustomCallbackID input for this Choreo. ((optional, string) A unique identifier that you can pass to eliminate the need to wait for a Temboo generated CallbackID. Callback identifiers may only contain numbers, letters, periods, and hyphens.)
        """
        super(InitializeOAuthInputSet, self)._set_input('CustomCallbackID', value)

    def set_ForwardingURL(self, value):
        """
        Set the value of the ForwardingURL input for this Choreo. ((optional, string) The URL that Temboo will redirect your users to after they grant access to your application. This should include the "https://" or "http://" prefix and be a fully qualified URL.)
        """
        super(InitializeOAuthInputSet, self)._set_input('ForwardingURL', value)


class InitializeOAuthResultSet(ResultSet):
    """
    A ResultSet with methods tailored to the values returned by the InitializeOAuth Choreo.
    The ResultSet object is used to retrieve the results of a Choreo execution.
    """

    def getJSONFromString(self, str):
        return json.loads(str)

    def get_AuthorizationURL(self):
        """
        Retrieve the value for the "AuthorizationURL" output from this Choreo execution. ((string) The authorization URL that the application's user needs to go to in order to grant access to your application.)
        """
        return self._output.get('AuthorizationURL', None)

    def get_CallbackID(self):
        """
        Retrieve the value for the "CallbackID" output from this Choreo execution. ((string) An ID used to retrieve the callback data that Temboo stores once your application's user authorizes.)
        """
        return self._output.get('CallbackID', None)


class InitializeOAuthChoreographyExecution(ChoreographyExecution):

    def _make_result_set(self, response, path):
        return InitializeOAuthResultSet(response, path)
jordanemedlock/psychtruths
temboo/core/Library/Bitly/OAuth/InitializeOAuth.py
Python
apache-2.0
5,111
0.005087
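A hedged usage sketch for the Choreo above. The `TembooSession` import path, the `execute_with_results` call, and the credential values all follow the usual Temboo Python SDK pattern but are assumptions here, not taken from this file.

from temboo.core.session import TembooSession  # assumed SDK import path

# Placeholder Temboo credentials (hypothetical values).
session = TembooSession('ACCOUNT_NAME', 'APP_KEY_NAME', 'APP_KEY_VALUE')

choreo = InitializeOAuth(session)
inputs = choreo.new_input_set()
inputs.set_ClientID('YOUR_BITLY_CLIENT_ID')  # placeholder

# `execute_with_results` is the usual SDK entry point (assumption).
results = choreo.execute_with_results(inputs)
print(results.get_AuthorizationURL())
print(results.get_CallbackID())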
#!/usr/bin/env python3
# Copyright (c) 2017 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test RPC calls related to net.

Tests correspond to code in rpc/net.cpp.
"""

import time

from test_framework.test_framework import StatusquoTestFramework
from test_framework.util import (
    assert_equal,
    assert_raises_jsonrpc,
    connect_nodes_bi,
    p2p_port,
)


class NetTest(StatusquoTestFramework):
    def __init__(self):
        super().__init__()
        self.setup_clean_chain = True
        self.num_nodes = 2

    def run_test(self):
        self._test_connection_count()
        self._test_getnettotals()
        self._test_getnetworkinginfo()
        self._test_getaddednodeinfo()
        self._test_getpeerinfo()

    def _test_connection_count(self):
        # connect_nodes_bi connects each node to the other
        assert_equal(self.nodes[0].getconnectioncount(), 2)

    def _test_getnettotals(self):
        # check that getnettotals totalbytesrecv and totalbytessent
        # are consistent with getpeerinfo
        peer_info = self.nodes[0].getpeerinfo()
        assert_equal(len(peer_info), 2)
        net_totals = self.nodes[0].getnettotals()
        assert_equal(sum([peer['bytesrecv'] for peer in peer_info]),
                     net_totals['totalbytesrecv'])
        assert_equal(sum([peer['bytessent'] for peer in peer_info]),
                     net_totals['totalbytessent'])

        # test getnettotals and getpeerinfo by doing a ping
        # the bytes sent/received should change
        # note ping and pong are 32 bytes each
        self.nodes[0].ping()
        time.sleep(0.1)
        peer_info_after_ping = self.nodes[0].getpeerinfo()
        net_totals_after_ping = self.nodes[0].getnettotals()
        for before, after in zip(peer_info, peer_info_after_ping):
            assert_equal(before['bytesrecv_per_msg']['pong'] + 32,
                         after['bytesrecv_per_msg']['pong'])
            assert_equal(before['bytessent_per_msg']['ping'] + 32,
                         after['bytessent_per_msg']['ping'])
        assert_equal(net_totals['totalbytesrecv'] + 32*2,
                     net_totals_after_ping['totalbytesrecv'])
        assert_equal(net_totals['totalbytessent'] + 32*2,
                     net_totals_after_ping['totalbytessent'])

    def _test_getnetworkinginfo(self):
        assert_equal(self.nodes[0].getnetworkinfo()['networkactive'], True)
        assert_equal(self.nodes[0].getnetworkinfo()['connections'], 2)

        self.nodes[0].setnetworkactive(False)
        assert_equal(self.nodes[0].getnetworkinfo()['networkactive'], False)
        timeout = 3
        while self.nodes[0].getnetworkinfo()['connections'] != 0:
            # Wait a bit for all sockets to close
            assert timeout > 0, 'not all connections closed in time'
            timeout -= 0.1
            time.sleep(0.1)

        self.nodes[0].setnetworkactive(True)
        connect_nodes_bi(self.nodes, 0, 1)
        assert_equal(self.nodes[0].getnetworkinfo()['networkactive'], True)
        assert_equal(self.nodes[0].getnetworkinfo()['connections'], 2)

    def _test_getaddednodeinfo(self):
        assert_equal(self.nodes[0].getaddednodeinfo(), [])
        # add a node (node2) to node0
        ip_port = "127.0.0.1:{}".format(p2p_port(2))
        self.nodes[0].addnode(ip_port, 'add')
        # check that the node has indeed been added
        added_nodes = self.nodes[0].getaddednodeinfo(ip_port)
        assert_equal(len(added_nodes), 1)
        assert_equal(added_nodes[0]['addednode'], ip_port)
        # check that a non-existent node returns an error
        assert_raises_jsonrpc(-24, "Node has not been added",
                              self.nodes[0].getaddednodeinfo, '1.1.1.1')

    def _test_getpeerinfo(self):
        peer_info = [x.getpeerinfo() for x in self.nodes]
        # check both sides of bidirectional connection between nodes
        # the address bound to on one side will be the source address for the other node
        assert_equal(peer_info[0][0]['addrbind'], peer_info[1][0]['addr'])
        assert_equal(peer_info[1][0]['addrbind'], peer_info[0][0]['addr'])


if __name__ == '__main__':
    NetTest().main()
Exgibichi/statusquo
test/functional/net.py
Python
mit
4,228
0.001419
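A back-of-the-envelope check (illustrative, not part of the test) of the byte accounting `_test_getnettotals` above relies on: one 32-byte ping out and one 32-byte pong back per peer, and node0 has two peer connections.

PING_BYTES = PONG_BYTES = 32   # per the test's own comment
NUM_PEERS = 2                  # node0's bidirectional connection gives two peers

delta_sent = PING_BYTES * NUM_PEERS   # pings node0 sends
delta_recv = PONG_BYTES * NUM_PEERS   # pongs node0 receives
assert delta_sent == delta_recv == 32 * 2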
def tryprint():
    return ('it will be oke')
cherylyli/stress-aid
env/lib/python3.5/site-packages/helowrld/__init__.py
Python
mit
45
0.022222
# Copyright 2013: Mirantis Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

from oslo.config import cfg
import sys

from rally.openstack.common.gettextutils import _
from rally.openstack.common import log as logging


LOG = logging.getLogger(__name__)

exc_log_opts = [
    cfg.BoolOpt('fatal_exception_format_errors',
                default=False,
                help='make exception message format errors fatal'),
]

CONF = cfg.CONF
CONF.register_opts(exc_log_opts)


class RallyException(Exception):
    """Base Rally Exception

    To correctly use this class, inherit from it and define
    a 'msg_fmt' property. That msg_fmt will get printf'd
    with the keyword arguments provided to the constructor.
    """
    msg_fmt = _("An unknown exception occurred.")

    def __init__(self, message=None, **kwargs):
        self.kwargs = kwargs

        if 'code' not in self.kwargs:
            try:
                self.kwargs['code'] = self.code
            except AttributeError:
                pass

        if not message:
            try:
                message = self.msg_fmt % kwargs
            except KeyError:
                exc_info = sys.exc_info()
                # kwargs doesn't match a variable in the message
                # log the issue and the kwargs
                msg = "kwargs don't match in string format operation: %s"
                LOG.debug(msg % kwargs, exc_info=exc_info)

                if CONF.fatal_exception_format_errors:
                    raise exc_info[0], exc_info[1], exc_info[2]
                else:
                    # at least get the core message out if something happened
                    message = self.msg_fmt

        super(RallyException, self).__init__(message)

    def format_message(self):
        if self.__class__.__name__.endswith('_Remote'):
            return self.args[0]
        else:
            return unicode(self)


class ImmutableException(RallyException):
    msg_fmt = _("This object is immutable.")


class InvalidArgumentsException(RallyException):
    msg_fmt = _("Invalid arguments: '%(message)s'")


class InvalidConfigException(RallyException):
    msg_fmt = _("This config has invalid schema: `%(message)s`")


class InvalidRunnerResult(RallyException):
    msg_fmt = _("Type of result of `%(name)s` runner should be"
                " `base.ScenarioRunnerResult`. Got: `%(results_type)s`")


class InvalidTaskException(InvalidConfigException):
    msg_fmt = _("This config is invalid: `%(message)s`")


class InvalidTaskConfigException(InvalidTaskException):
    msg_fmt = _("This config has invalid schema: `%(message)s`")


class NotFoundScenarios(InvalidTaskException):
    msg_fmt = _("There are no benchmark scenarios with names: `%(names)s`.")


class InvalidBenchmarkConfig(InvalidTaskException):
    msg_fmt = _("Task config is invalid.\n"
                "\tBenchmark %(name)s has wrong configuration of args at"
                " position %(pos)s: %(args)s"
                "\n\tReason: %(reason)s")


class TestException(RallyException):
    msg_fmt = _("Test failed: %(test_message)s")


class NotFoundException(RallyException):
    msg_fmt = _("Not found.")


class NoSuchEngine(NotFoundException):
    msg_fmt = _("There is no engine with name `%(engine_name)s`.")


class NoSuchVMProvider(NotFoundException):
    msg_fmt = _("There is no vm provider with name `%(vm_provider_name)s`.")


class NoSuchScenario(NotFoundException):
    msg_fmt = _("There is no benchmark scenario with name `%(name)s`.")


class NoSuchRunner(NotFoundException):
    msg_fmt = _("There is no benchmark runner with type `%(type)s`.")


class NoSuchContext(NotFoundException):
    msg_fmt = _("There is no benchmark context with name `%(name)s`.")


class NoSuchConfigField(NotFoundException):
    msg_fmt = _("There is no field in the task config with name `%(name)s`.")


class TaskNotFound(NotFoundException):
    msg_fmt = _("Task with uuid=%(uuid)s not found.")


class DeploymentNotFound(NotFoundException):
    msg_fmt = _("Deployment with uuid=%(uuid)s not found.")


class DeploymentIsBusy(RallyException):
    msg_fmt = _("There are allocated resources for the deployment with "
                "uuid=%(uuid)s.")


class ResourceNotFound(NotFoundException):
    msg_fmt = _("Resource with id=%(id)s not found.")


class TimeoutException(RallyException):
    msg_fmt = _("Timeout exceeded.")


class GetResourceFailure(RallyException):
    msg_fmt = _("Failed to get the resource %(resource)s: %(err)s")


class GetResourceNotFound(GetResourceFailure):
    msg_fmt = _("Resource %(resource)s is not found.")


class GetResourceErrorStatus(GetResourceFailure):
    msg_fmt = _("Resource %(resource)s has %(status)s status: %(fault)s")


class SSHError(RallyException):
    msg_fmt = _("Remote command failed.")


class TaskInvalidStatus(RallyException):
    msg_fmt = _("Task `%(uuid)s` in `%(actual)s` status but `%(require)s` is "
                "required.")


class ChecksumMismatch(RallyException):
    msg_fmt = _("Checksum mismatch for image: %(url)s")


class InvalidAdminException(InvalidArgumentsException):
    msg_fmt = _("user %(username)s doesn't have 'admin' role")


class InvalidEndpointsException(InvalidArgumentsException):
    msg_fmt = _("wrong keystone credentials specified in your endpoint"
                " properties. (HTTP 401)")


class HostUnreachableException(InvalidArgumentsException):
    msg_fmt = _("unable to establish connection to the remote host: %(url)s")


class InvalidScenarioArgument(RallyException):
    msg_fmt = _("Invalid scenario argument: '%(message)s'")
ytsarev/rally
rally/exceptions.py
Python
apache-2.0
6,154
0
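A minimal sketch of how the `msg_fmt` pattern in the Rally exceptions module above is meant to be used. This simplified version drops the oslo.config and gettext machinery so it runs standalone; the class names mirror the source, but the bodies are reduced.

# Simplified stand-ins for the source classes (no oslo.config/gettext).
class RallyException(Exception):
    msg_fmt = "An unknown exception occurred."

    def __init__(self, message=None, **kwargs):
        if not message:
            # Interpolate the subclass's format string with the kwargs.
            message = self.msg_fmt % kwargs
        super(RallyException, self).__init__(message)

class TaskNotFound(RallyException):
    msg_fmt = "Task with uuid=%(uuid)s not found."

try:
    raise TaskNotFound(uuid='42')
except RallyException as exc:
    print(exc)  # -> Task with uuid=42 not found.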
import pytest import numpy as np from numpy.testing import assert_array_almost_equal, assert_array_equal, assert_allclose from sklearn.datasets import load_linnerud from sklearn.cross_decomposition._pls import ( _center_scale_xy, _get_first_singular_vectors_power_method, _get_first_singular_vectors_svd, _svd_flip_1d, ) from sklearn.cross_decomposition import CCA from sklearn.cross_decomposition import PLSSVD, PLSRegression, PLSCanonical from sklearn.datasets import make_regression from sklearn.utils import check_random_state from sklearn.utils.extmath import svd_flip from sklearn.exceptions import ConvergenceWarning def assert_matrix_orthogonal(M): K = np.dot(M.T, M) assert_array_almost_equal(K, np.diag(np.diag(K))) def test_pls_canonical_basics(): # Basic checks for PLSCanonical d = load_linnerud() X = d.data Y = d.target pls = PLSCanonical(n_components=X.shape[1]) pls.fit(X, Y) assert_matrix_orthogonal(pls.x_weights_) assert_matrix_orthogonal(pls.y_weights_) assert_matrix_orthogonal(pls._x_scores) assert_matrix_orthogonal(pls._y_scores) # Check X = TP' and Y = UQ' T = pls._x_scores P = pls.x_loadings_ U = pls._y_scores Q = pls.y_loadings_ # Need to scale first Xc, Yc, x_mean, y_mean, x_std, y_std = _center_scale_xy( X.copy(), Y.copy(), scale=True ) assert_array_almost_equal(Xc, np.dot(T, P.T)) assert_array_almost_equal(Yc, np.dot(U, Q.T)) # Check that rotations on training data lead to scores Xt = pls.transform(X) assert_array_almost_equal(Xt, pls._x_scores) Xt, Yt = pls.transform(X, Y) assert_array_almost_equal(Xt, pls._x_scores) assert_array_almost_equal(Yt, pls._y_scores) # Check that inverse_transform works X_back = pls.inverse_transform(Xt) assert_array_almost_equal(X_back, X) _, Y_back = pls.inverse_transform(Xt, Yt) assert_array_almost_equal(Y_back, Y) def test_sanity_check_pls_regression(): # Sanity check for PLSRegression # The results were checked against the R-packages plspm, misOmics and pls d = load_linnerud() X = d.data Y = d.target pls = PLSRegression(n_components=X.shape[1]) X_trans, _ = pls.fit_transform(X, Y) # FIXME: one would expect y_trans == pls.y_scores_ but this is not # the case. # xref: https://github.com/scikit-learn/scikit-learn/issues/22420 assert_allclose(X_trans, pls.x_scores_) expected_x_weights = np.array( [ [-0.61330704, -0.00443647, 0.78983213], [-0.74697144, -0.32172099, -0.58183269], [-0.25668686, 0.94682413, -0.19399983], ] ) expected_x_loadings = np.array( [ [-0.61470416, -0.24574278, 0.78983213], [-0.65625755, -0.14396183, -0.58183269], [-0.51733059, 1.00609417, -0.19399983], ] ) expected_y_weights = np.array( [ [+0.32456184, 0.29892183, 0.20316322], [+0.42439636, 0.61970543, 0.19320542], [-0.13143144, -0.26348971, -0.17092916], ] ) expected_y_loadings = np.array( [ [+0.32456184, 0.29892183, 0.20316322], [+0.42439636, 0.61970543, 0.19320542], [-0.13143144, -0.26348971, -0.17092916], ] ) assert_array_almost_equal(np.abs(pls.x_loadings_), np.abs(expected_x_loadings)) assert_array_almost_equal(np.abs(pls.x_weights_), np.abs(expected_x_weights)) assert_array_almost_equal(np.abs(pls.y_loadings_), np.abs(expected_y_loadings)) assert_array_almost_equal(np.abs(pls.y_weights_), np.abs(expected_y_weights)) # The R / Python difference in the signs should be consistent across # loadings, weights, etc. 
x_loadings_sign_flip = np.sign(pls.x_loadings_ / expected_x_loadings) x_weights_sign_flip = np.sign(pls.x_weights_ / expected_x_weights) y_weights_sign_flip = np.sign(pls.y_weights_ / expected_y_weights) y_loadings_sign_flip = np.sign(pls.y_loadings_ / expected_y_loadings) assert_array_almost_equal(x_loadings_sign_flip, x_weights_sign_flip) assert_array_almost_equal(y_loadings_sign_flip, y_weights_sign_flip) def test_sanity_check_pls_regression_constant_column_Y(): # Check behavior when the first column of Y is constant # The results are checked against a modified version of plsreg2 # from the R-package plsdepot d = load_linnerud() X = d.data Y = d.target Y[:, 0] = 1 pls = PLSRegression(n_components=X.shape[1]) pls.fit(X, Y) expected_x_weights = np.array( [ [-0.6273573, 0.007081799, 0.7786994], [-0.7493417, -0.277612681, -0.6011807], [-0.2119194, 0.960666981, -0.1794690], ] ) expected_x_loadings = np.array( [ [-0.6273512, -0.22464538, 0.7786994], [-0.6643156, -0.09871193, -0.6011807], [-0.5125877, 1.01407380, -0.1794690], ] ) expected_y_loadings = np.array( [ [0.0000000, 0.0000000, 0.0000000], [0.4357300, 0.5828479, 0.2174802], [-0.1353739, -0.2486423, -0.1810386], ] ) assert_array_almost_equal(np.abs(expected_x_weights), np.abs(pls.x_weights_)) assert_array_almost_equal(np.abs(expected_x_loadings), np.abs(pls.x_loadings_)) # For the PLSRegression with default parameters, y_loadings == y_weights assert_array_almost_equal(np.abs(pls.y_loadings_), np.abs(expected_y_loadings)) assert_array_almost_equal(np.abs(pls.y_weights_), np.abs(expected_y_loadings)) x_loadings_sign_flip = np.sign(expected_x_loadings / pls.x_loadings_) x_weights_sign_flip = np.sign(expected_x_weights / pls.x_weights_) # we ignore the first full-zeros row for y y_loadings_sign_flip = np.sign(expected_y_loadings[1:] / pls.y_loadings_[1:]) assert_array_equal(x_loadings_sign_flip, x_weights_sign_flip) assert_array_equal(x_loadings_sign_flip[1:], y_loadings_sign_flip) def test_sanity_check_pls_canonical(): # Sanity check for PLSCanonical # The results were checked against the R-package plspm d = load_linnerud() X = d.data Y = d.target pls = PLSCanonical(n_components=X.shape[1]) pls.fit(X, Y) expected_x_weights = np.array( [ [-0.61330704, 0.25616119, -0.74715187], [-0.74697144, 0.11930791, 0.65406368], [-0.25668686, -0.95924297, -0.11817271], ] ) expected_x_rotations = np.array( [ [-0.61330704, 0.41591889, -0.62297525], [-0.74697144, 0.31388326, 0.77368233], [-0.25668686, -0.89237972, -0.24121788], ] ) expected_y_weights = np.array( [ [+0.58989127, 0.7890047, 0.1717553], [+0.77134053, -0.61351791, 0.16920272], [-0.23887670, -0.03267062, 0.97050016], ] ) expected_y_rotations = np.array( [ [+0.58989127, 0.7168115, 0.30665872], [+0.77134053, -0.70791757, 0.19786539], [-0.23887670, -0.00343595, 0.94162826], ] ) assert_array_almost_equal(np.abs(pls.x_rotations_), np.abs(expected_x_rotations)) assert_array_almost_equal(np.abs(pls.x_weights_), np.abs(expected_x_weights)) assert_array_almost_equal(np.abs(pls.y_rotations_), np.abs(expected_y_rotations)) assert_array_almost_equal(np.abs(pls.y_weights_), np.abs(expected_y_weights)) x_rotations_sign_flip = np.sign(pls.x_rotations_ / expected_x_rotations) x_weights_sign_flip = np.sign(pls.x_weights_ / expected_x_weights) y_rotations_sign_flip = np.sign(pls.y_rotations_ / expected_y_rotations) y_weights_sign_flip = np.sign(pls.y_weights_ / expected_y_weights) assert_array_almost_equal(x_rotations_sign_flip, x_weights_sign_flip) assert_array_almost_equal(y_rotations_sign_flip, 
y_weights_sign_flip) assert_matrix_orthogonal(pls.x_weights_) assert_matrix_orthogonal(pls.y_weights_) assert_matrix_orthogonal(pls._x_scores) assert_matrix_orthogonal(pls._y_scores) def test_sanity_check_pls_canonical_random(): # Sanity check for PLSCanonical on random data # The results were checked against the R-package plspm n = 500 p_noise = 10 q_noise = 5 # 2 latents vars: rng = check_random_state(11) l1 = rng.normal(size=n) l2 = rng.normal(size=n) latents = np.array([l1, l1, l2, l2]).T X = latents + rng.normal(size=4 * n).reshape((n, 4)) Y = latents + rng.normal(size=4 * n).reshape((n, 4)) X = np.concatenate((X, rng.normal(size=p_noise * n).reshape(n, p_noise)), axis=1) Y = np.concatenate((Y, rng.normal(size=q_noise * n).reshape(n, q_noise)), axis=1) pls = PLSCanonical(n_components=3) pls.fit(X, Y) expected_x_weights = np.array( [ [0.65803719, 0.19197924, 0.21769083], [0.7009113, 0.13303969, -0.15376699], [0.13528197, -0.68636408, 0.13856546], [0.16854574, -0.66788088, -0.12485304], [-0.03232333, -0.04189855, 0.40690153], [0.1148816, -0.09643158, 0.1613305], [0.04792138, -0.02384992, 0.17175319], [-0.06781, -0.01666137, -0.18556747], [-0.00266945, -0.00160224, 0.11893098], [-0.00849528, -0.07706095, 0.1570547], [-0.00949471, -0.02964127, 0.34657036], [-0.03572177, 0.0945091, 0.3414855], [0.05584937, -0.02028961, -0.57682568], [0.05744254, -0.01482333, -0.17431274], ] ) expected_x_loadings = np.array( [ [0.65649254, 0.1847647, 0.15270699], [0.67554234, 0.15237508, -0.09182247], [0.19219925, -0.67750975, 0.08673128], [0.2133631, -0.67034809, -0.08835483], [-0.03178912, -0.06668336, 0.43395268], [0.15684588, -0.13350241, 0.20578984], [0.03337736, -0.03807306, 0.09871553], [-0.06199844, 0.01559854, -0.1881785], [0.00406146, -0.00587025, 0.16413253], [-0.00374239, -0.05848466, 0.19140336], [0.00139214, -0.01033161, 0.32239136], [-0.05292828, 0.0953533, 0.31916881], [0.04031924, -0.01961045, -0.65174036], [0.06172484, -0.06597366, -0.1244497], ] ) expected_y_weights = np.array( [ [0.66101097, 0.18672553, 0.22826092], [0.69347861, 0.18463471, -0.23995597], [0.14462724, -0.66504085, 0.17082434], [0.22247955, -0.6932605, -0.09832993], [0.07035859, 0.00714283, 0.67810124], [0.07765351, -0.0105204, -0.44108074], [-0.00917056, 0.04322147, 0.10062478], [-0.01909512, 0.06182718, 0.28830475], [0.01756709, 0.04797666, 0.32225745], ] ) expected_y_loadings = np.array( [ [0.68568625, 0.1674376, 0.0969508], [0.68782064, 0.20375837, -0.1164448], [0.11712173, -0.68046903, 0.12001505], [0.17860457, -0.6798319, -0.05089681], [0.06265739, -0.0277703, 0.74729584], [0.0914178, 0.00403751, -0.5135078], [-0.02196918, -0.01377169, 0.09564505], [-0.03288952, 0.09039729, 0.31858973], [0.04287624, 0.05254676, 0.27836841], ] ) assert_array_almost_equal(np.abs(pls.x_loadings_), np.abs(expected_x_loadings)) assert_array_almost_equal(np.abs(pls.x_weights_), np.abs(expected_x_weights)) assert_array_almost_equal(np.abs(pls.y_loadings_), np.abs(expected_y_loadings)) assert_array_almost_equal(np.abs(pls.y_weights_), np.abs(expected_y_weights)) x_loadings_sign_flip = np.sign(pls.x_loadings_ / expected_x_loadings) x_weights_sign_flip = np.sign(pls.x_weights_ / expected_x_weights) y_weights_sign_flip = np.sign(pls.y_weights_ / expected_y_weights) y_loadings_sign_flip = np.sign(pls.y_loadings_ / expected_y_loadings) assert_array_almost_equal(x_loadings_sign_flip, x_weights_sign_flip) assert_array_almost_equal(y_loadings_sign_flip, y_weights_sign_flip) assert_matrix_orthogonal(pls.x_weights_) 
assert_matrix_orthogonal(pls.y_weights_) assert_matrix_orthogonal(pls._x_scores) assert_matrix_orthogonal(pls._y_scores) def test_convergence_fail(): # Make sure ConvergenceWarning is raised if max_iter is too small d = load_linnerud() X = d.data Y = d.target pls_nipals = PLSCanonical(n_components=X.shape[1], max_iter=2) with pytest.warns(ConvergenceWarning): pls_nipals.fit(X, Y) @pytest.mark.parametrize("Est", (PLSSVD, PLSRegression, PLSCanonical)) def test_attibutes_shapes(Est): # Make sure attributes are of the correct shape depending on n_components d = load_linnerud() X = d.data Y = d.target n_components = 2 pls = Est(n_components=n_components) pls.fit(X, Y) assert all( attr.shape[1] == n_components for attr in (pls.x_weights_, pls.y_weights_) ) @pytest.mark.parametrize("Est", (PLSRegression, PLSCanonical, CCA)) def test_univariate_equivalence(Est): # Ensure 2D Y with 1 column is equivalent to 1D Y d = load_linnerud() X = d.data Y = d.target est = Est(n_components=1) one_d_coeff = est.fit(X, Y[:, 0]).coef_ two_d_coeff = est.fit(X, Y[:, :1]).coef_ assert one_d_coeff.shape == two_d_coeff.shape assert_array_almost_equal(one_d_coeff, two_d_coeff) @pytest.mark.parametrize("Est", (PLSRegression, PLSCanonical, CCA, PLSSVD)) def test_copy(Est): # check that the "copy" keyword works d = load_linnerud() X = d.data Y = d.target X_orig = X.copy() # copy=True won't modify inplace pls = Est(copy=True).fit(X, Y) assert_array_equal(X, X_orig) # copy=False will modify inplace with pytest.raises(AssertionError): Est(copy=False).fit(X, Y) assert_array_almost_equal(X, X_orig) if Est is PLSSVD: return # PLSSVD does not support copy param in predict or transform X_orig = X.copy() with pytest.raises(AssertionError): pls.transform(X, Y, copy=False), assert_array_almost_equal(X, X_orig) X_orig = X.copy() with pytest.raises(AssertionError): pls.predict(X, copy=False), assert_array_almost_equal(X, X_orig) # Make sure copy=True gives same transform and predictions as predict=False assert_array_almost_equal( pls.transform(X, Y, copy=True), pls.transform(X.copy(), Y.copy(), copy=False) ) assert_array_almost_equal( pls.predict(X, copy=True), pls.predict(X.copy(), copy=False) ) def _generate_test_scale_and_stability_datasets(): """Generate dataset for test_scale_and_stability""" # dataset for non-regression 7818 rng = np.random.RandomState(0) n_samples = 1000 n_targets = 5 n_features = 10 Q = rng.randn(n_targets, n_features) Y = rng.randn(n_samples, n_targets) X = np.dot(Y, Q) + 2 * rng.randn(n_samples, n_features) + 1 X *= 1000 yield X, Y # Data set where one of the features is constraint X, Y = load_linnerud(return_X_y=True) # causes X[:, -1].std() to be zero X[:, -1] = 1.0 yield X, Y X = np.array([[0.0, 0.0, 1.0], [1.0, 0.0, 0.0], [2.0, 2.0, 2.0], [3.0, 5.0, 4.0]]) Y = np.array([[0.1, -0.2], [0.9, 1.1], [6.2, 5.9], [11.9, 12.3]]) yield X, Y # Seeds that provide a non-regression test for #18746, where CCA fails seeds = [530, 741] for seed in seeds: rng = np.random.RandomState(seed) X = rng.randn(4, 3) Y = rng.randn(4, 2) yield X, Y @pytest.mark.parametrize("Est", (CCA, PLSCanonical, PLSRegression, PLSSVD)) @pytest.mark.parametrize("X, Y", _generate_test_scale_and_stability_datasets()) def test_scale_and_stability(Est, X, Y): """scale=True is equivalent to scale=False on centered/scaled data This allows to check numerical stability over platforms as well""" X_s, Y_s, *_ = _center_scale_xy(X, Y) X_score, Y_score = Est(scale=True).fit_transform(X, Y) X_s_score, Y_s_score = Est(scale=False).fit_transform(X_s, Y_s) 
assert_allclose(X_s_score, X_score, atol=1e-4) assert_allclose(Y_s_score, Y_score, atol=1e-4) @pytest.mark.parametrize("Est", (PLSSVD, PLSCanonical, CCA)) @pytest.mark.parametrize( "n_components, err_type, err_msg", [ (0, ValueError, "n_components == 0, must be >= 1."), (4, ValueError, "n_components == 4, must be <= 3."), ( 2.0, TypeError, "n_components must be an instance of int", ), ], ) def test_n_components_bounds(Est, n_components, err_type, err_msg): """Check the validation of `n_components` for `PLS` regressors.""" rng = np.random.RandomState(0) X = rng.randn(10, 5) Y = rng.randn(10, 3) est = Est(n_components=n_components) with pytest.raises(err_type, match=err_msg): est.fit(X, Y) @pytest.mark.parametrize( "n_components, err_type, err_msg", [ (0, ValueError, "n_components == 0, must be >= 1."), (6, ValueError, "n_components == 6, must be <= 5."), ( 2.0, TypeError, "n_components must be an instance of int", ), ], ) def test_n_components_bounds_pls_regression(n_components, err_type, err_msg): """Check the validation of `n_components` for `PLSRegression`.""" rng = np.random.RandomState(0) X = rng.randn(10, 5) Y = rng.randn(10, 3) est = PLSRegression(n_components=n_components) with pytest.raises(err_type, match=err_msg): est.fit(X, Y) @pytest.mark.parametrize("n_samples, n_features", [(100, 10), (100, 200)]) @pytest.mark.parametrize("seed", range(10)) def test_singular_value_helpers(n_samples, n_features, seed): # Make sure SVD and power method give approximately the same results X, Y = make_regression(n_samples, n_features, n_targets=5, random_state=seed) u1, v1, _ = _get_first_singular_vectors_power_method(X, Y, norm_y_weights=True) u2, v2 = _get_first_singular_vectors_svd(X, Y) _svd_flip_1d(u1, v1) _svd_flip_1d(u2, v2) rtol = 1e-1 assert_allclose(u1, u2, rtol=rtol) assert_allclose(v1, v2, rtol=rtol) def test_one_component_equivalence(): # PLSSVD, PLSRegression and PLSCanonical should all be equivalent when # n_components is 1 X, Y = make_regression(100, 10, n_targets=5, random_state=0) svd = PLSSVD(n_components=1).fit(X, Y).transform(X) reg = PLSRegression(n_components=1).fit(X, Y).transform(X) canonical = PLSCanonical(n_components=1).fit(X, Y).transform(X) assert_allclose(svd, reg, rtol=1e-2) assert_allclose(svd, canonical, rtol=1e-2) def test_svd_flip_1d(): # Make sure svd_flip_1d is equivalent to svd_flip u = np.array([1, -4, 2]) v = np.array([1, 2, 3]) u_expected, v_expected = svd_flip(u.reshape(-1, 1), v.reshape(1, -1)) _svd_flip_1d(u, v) # inplace assert_allclose(u, u_expected.ravel()) assert_allclose(u, [-1, 4, -2]) assert_allclose(v, v_expected.ravel()) assert_allclose(v, [-1, -2, -3]) def test_loadings_converges(): """Test that CCA converges. Non-regression test for #19549.""" X, y = make_regression(n_samples=200, n_features=20, n_targets=20, random_state=20) cca = CCA(n_components=10, max_iter=500) with pytest.warns(None) as record: cca.fit(X, y) # ConvergenceWarning should not be raised assert not [w.message for w in record] # Loadings converges to reasonable values assert np.all(np.abs(cca.x_loadings_) < 1) def test_pls_constant_y(): """Checks warning when y is constant. 
Non-regression test for #19831""" rng = np.random.RandomState(42) x = rng.rand(100, 3) y = np.zeros(100) pls = PLSRegression() msg = "Y residual is constant at iteration" with pytest.warns(UserWarning, match=msg): pls.fit(x, y) assert_allclose(pls.x_rotations_, 0) @pytest.mark.parametrize("Klass", [CCA, PLSSVD, PLSRegression, PLSCanonical]) def test_pls_feature_names_out(Klass): """Check `get_feature_names_out` cross_decomposition module.""" X, Y = load_linnerud(return_X_y=True) est = Klass().fit(X, Y) names_out = est.get_feature_names_out() class_name_lower = Klass.__name__.lower() expected_names_out = np.array( [f"{class_name_lower}{i}" for i in range(est.x_weights_.shape[1])], dtype=object, ) assert_array_equal(names_out, expected_names_out)
manhhomienbienthuy/scikit-learn
sklearn/cross_decomposition/tests/test_pls.py
Python
bsd-3-clause
20,619
0.001261
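A short usage sketch of the estimators exercised by the test suite above, using the standard scikit-learn API on the same linnerud data; the shapes in the comments follow from linnerud's 20 samples and are illustrative.

from sklearn.cross_decomposition import PLSRegression
from sklearn.datasets import load_linnerud

X, Y = load_linnerud(return_X_y=True)
pls = PLSRegression(n_components=2).fit(X, Y)
X_scores = pls.transform(X)   # latent scores, shape (20, 2)
Y_pred = pls.predict(X)       # predictions back in the original Y space, shape (20, 3)
print(X_scores.shape, Y_pred.shape)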
from __future__ import unicode_literals from datetime import date from django.conf import settings from django.contrib.auth.models import User, Group, Permission, AnonymousUser from django.contrib.auth.tests.utils import skipIfCustomUser from django.contrib.auth.tests.custom_user import ExtensionUser, CustomPermissionsUser, CustomUser from django.contrib.contenttypes.models import ContentType from django.core.exceptions import ImproperlyConfigured, PermissionDenied from django.contrib.auth import authenticate from django.test import TestCase from django.test.utils import override_settings class BaseModelBackendTest(object): """ A base class for tests that need to validate the ModelBackend with different User models. Subclasses should define a class level UserModel attribute, and a create_users() method to construct two users for test purposes. """ backend = 'django.contrib.auth.backends.ModelBackend' def setUp(self): self.curr_auth = settings.AUTHENTICATION_BACKENDS settings.AUTHENTICATION_BACKENDS = (self.backend,) self.create_users() def tearDown(self): settings.AUTHENTICATION_BACKENDS = self.curr_auth # The custom_perms test messes with ContentTypes, which will # be cached; flush the cache to ensure there are no side effects # Refs #14975, #14925 ContentType.objects.clear_cache() def test_has_perm(self): user = self.UserModel._default_manager.get(pk=self.user.pk) self.assertEqual(user.has_perm('auth.test'), False) user.is_staff = True user.save() self.assertEqual(user.has_perm('auth.test'), False) user.is_superuser = True user.save() self.assertEqual(user.has_perm('auth.test'), True) user.is_staff = False user.is_superuser = False user.save() self.assertEqual(user.has_perm('auth.test'), False) user.is_staff = True user.is_superuser = True user.is_active = False user.save() self.assertEqual(user.has_perm('auth.test'), False) def test_custom_perms(self): user = self.UserModel._default_manager.get(pk=self.user.pk) content_type = ContentType.objects.get_for_model(Group) perm = Permission.objects.create(name='test', content_type=content_type, codename='test') user.user_permissions.add(perm) user.save() # reloading user to purge the _perm_cache user = self.UserModel._default_manager.get(pk=self.user.pk) self.assertEqual(user.get_all_permissions() == set(['auth.test']), True) self.assertEqual(user.get_group_permissions(), set([])) self.assertEqual(user.has_module_perms('Group'), False) self.assertEqual(user.has_module_perms('auth'), True) perm = Permission.objects.create(name='test2', content_type=content_type, codename='test2') user.user_permissions.add(perm) user.save() perm = Permission.objects.create(name='test3', content_type=content_type, codename='test3') user.user_permissions.add(perm) user.save() user = self.UserModel._default_manager.get(pk=self.user.pk) self.assertEqual(user.get_all_permissions(), set(['auth.test2', 'auth.test', 'auth.test3'])) self.assertEqual(user.has_perm('test'), False) self.assertEqual(user.has_perm('auth.test'), True) self.assertEqual(user.has_perms(['auth.test2', 'auth.test3']), True) perm = Permission.objects.create(name='test_group', content_type=content_type, codename='test_group') group = Group.objects.create(name='test_group') group.permissions.add(perm) group.save() user.groups.add(group) user = self.UserModel._default_manager.get(pk=self.user.pk) exp = set(['auth.test2', 'auth.test', 'auth.test3', 'auth.test_group']) self.assertEqual(user.get_all_permissions(), exp) self.assertEqual(user.get_group_permissions(), set(['auth.test_group'])) 
self.assertEqual(user.has_perms(['auth.test3', 'auth.test_group']), True) user = AnonymousUser() self.assertEqual(user.has_perm('test'), False) self.assertEqual(user.has_perms(['auth.test2', 'auth.test3']), False) def test_has_no_object_perm(self): """Regressiontest for #12462""" user = self.UserModel._default_manager.get(pk=self.user.pk) content_type = ContentType.objects.get_for_model(Group) perm = Permission.objects.create(name='test', content_type=content_type, codename='test') user.user_permissions.add(perm) user.save() self.assertEqual(user.has_perm('auth.test', 'object'), False) self.assertEqual(user.get_all_permissions('object'), set([])) self.assertEqual(user.has_perm('auth.test'), True) self.assertEqual(user.get_all_permissions(), set(['auth.test'])) def test_get_all_superuser_permissions(self): "A superuser has all permissions. Refs #14795" user = self.UserModel._default_manager.get(pk=self.superuser.pk) self.assertEqual(len(user.get_all_permissions()), len(Permission.objects.all())) @skipIfCustomUser class ModelBackendTest(BaseModelBackendTest, TestCase): """ Tests for the ModelBackend using the default User model. """ UserModel = User def create_users(self): self.user = User.objects.create_user( username='test', email='test@example.com', password='test', ) self.superuser = User.objects.create_superuser( username='test2', email='test2@example.com', password='test', ) @override_settings(AUTH_USER_MODEL='auth.ExtensionUser') class ExtensionUserModelBackendTest(BaseModelBackendTest, TestCase): """ Tests for the ModelBackend using the custom ExtensionUser model. This isn't a perfect test, because both the User and ExtensionUser are synchronized to the database, which wouldn't ordinary happen in production. As a result, it doesn't catch errors caused by the non- existence of the User table. The specific problem is queries on .filter(groups__user) et al, which makes an implicit assumption that the user model is called 'User'. In production, the auth.User table won't exist, so the requested join won't exist either; in testing, the auth.User *does* exist, and so does the join. However, the join table won't contain any useful data; for testing, we check that the data we expect actually does exist. """ UserModel = ExtensionUser def create_users(self): self.user = ExtensionUser._default_manager.create_user( username='test', email='test@example.com', password='test', date_of_birth=date(2006, 4, 25) ) self.superuser = ExtensionUser._default_manager.create_superuser( username='test2', email='test2@example.com', password='test', date_of_birth=date(1976, 11, 8) ) @override_settings(AUTH_USER_MODEL='auth.CustomPermissionsUser') class CustomPermissionsUserModelBackendTest(BaseModelBackendTest, TestCase): """ Tests for the ModelBackend using the CustomPermissionsUser model. As with the ExtensionUser test, this isn't a perfect test, because both the User and CustomPermissionsUser are synchronized to the database, which wouldn't ordinary happen in production. 
""" UserModel = CustomPermissionsUser def create_users(self): self.user = CustomPermissionsUser._default_manager.create_user( email='test@example.com', password='test', date_of_birth=date(2006, 4, 25) ) self.superuser = CustomPermissionsUser._default_manager.create_superuser( email='test2@example.com', password='test', date_of_birth=date(1976, 11, 8) ) @override_settings(AUTH_USER_MODEL='auth.CustomUser') class CustomUserModelBackendAuthenticateTest(TestCase): """ Tests that the model backend can accept a credentials kwarg labeled with custom user model's USERNAME_FIELD. """ def test_authenticate(self): test_user = CustomUser._default_manager.create_user( email='test@example.com', password='test', date_of_birth=date(2006, 4, 25) ) authenticated_user = authenticate(email='test@example.com', password='test') self.assertEqual(test_user, authenticated_user) class TestObj(object): pass class SimpleRowlevelBackend(object): def has_perm(self, user, perm, obj=None): if not obj: return # We only support row level perms if isinstance(obj, TestObj): if user.username == 'test2': return True elif user.is_anonymous() and perm == 'anon': return True elif not user.is_active and perm == 'inactive': return True return False def has_module_perms(self, user, app_label): if not user.is_anonymous() and not user.is_active: return False return app_label == "app1" def get_all_permissions(self, user, obj=None): if not obj: return [] # We only support row level perms if not isinstance(obj, TestObj): return ['none'] if user.is_anonymous(): return ['anon'] if user.username == 'test2': return ['simple', 'advanced'] else: return ['simple'] def get_group_permissions(self, user, obj=None): if not obj: return # We only support row level perms if not isinstance(obj, TestObj): return ['none'] if 'test_group' in [group.name for group in user.groups.all()]: return ['group_perm'] else: return ['none'] @skipIfCustomUser class RowlevelBackendTest(TestCase): """ Tests for auth backend that supports object level permissions """ backend = 'django.contrib.auth.tests.auth_backends.SimpleRowlevelBackend' def setUp(self): self.curr_auth = settings.AUTHENTICATION_BACKENDS settings.AUTHENTICATION_BACKENDS = tuple(self.curr_auth) + (self.backend,) self.user1 = User.objects.create_user('test', 'test@example.com', 'test') self.user2 = User.objects.create_user('test2', 'test2@example.com', 'test') self.user3 = User.objects.create_user('test3', 'test3@example.com', 'test') def tearDown(self): settings.AUTHENTICATION_BACKENDS = self.curr_auth # The get_group_permissions test messes with ContentTypes, which will # be cached; flush the cache to ensure there are no side effects # Refs #14975, #14925 ContentType.objects.clear_cache() def test_has_perm(self): self.assertEqual(self.user1.has_perm('perm', TestObj()), False) self.assertEqual(self.user2.has_perm('perm', TestObj()), True) self.assertEqual(self.user2.has_perm('perm'), False) self.assertEqual(self.user2.has_perms(['simple', 'advanced'], TestObj()), True) self.assertEqual(self.user3.has_perm('perm', TestObj()), False) self.assertEqual(self.user3.has_perm('anon', TestObj()), False) self.assertEqual(self.user3.has_perms(['simple', 'advanced'], TestObj()), False) def test_get_all_permissions(self): self.assertEqual(self.user1.get_all_permissions(TestObj()), set(['simple'])) self.assertEqual(self.user2.get_all_permissions(TestObj()), set(['simple', 'advanced'])) self.assertEqual(self.user2.get_all_permissions(), set([])) def test_get_group_permissions(self): group = 
Group.objects.create(name='test_group')
        self.user3.groups.add(group)
        self.assertEqual(self.user3.get_group_permissions(TestObj()), set(['group_perm']))


class AnonymousUserBackendTest(TestCase):
    """
    Tests for AnonymousUser delegating to backend.
    """
    backend = 'django.contrib.auth.tests.auth_backends.SimpleRowlevelBackend'

    def setUp(self):
        self.curr_auth = settings.AUTHENTICATION_BACKENDS
        settings.AUTHENTICATION_BACKENDS = (self.backend,)
        self.user1 = AnonymousUser()

    def tearDown(self):
        settings.AUTHENTICATION_BACKENDS = self.curr_auth

    def test_has_perm(self):
        self.assertEqual(self.user1.has_perm('perm', TestObj()), False)
        self.assertEqual(self.user1.has_perm('anon', TestObj()), True)

    def test_has_perms(self):
        self.assertEqual(self.user1.has_perms(['anon'], TestObj()), True)
        self.assertEqual(self.user1.has_perms(['anon', 'perm'], TestObj()), False)

    def test_has_module_perms(self):
        self.assertEqual(self.user1.has_module_perms("app1"), True)
        self.assertEqual(self.user1.has_module_perms("app2"), False)

    def test_get_all_permissions(self):
        self.assertEqual(self.user1.get_all_permissions(TestObj()), set(['anon']))


@skipIfCustomUser
@override_settings(AUTHENTICATION_BACKENDS=[])
class NoBackendsTest(TestCase):
    """
    Tests that an appropriate error is raised if no auth backends are provided.
    """
    def setUp(self):
        self.user = User.objects.create_user('test', 'test@example.com', 'test')

    def test_raises_exception(self):
        self.assertRaises(ImproperlyConfigured, self.user.has_perm, ('perm', TestObj(),))


@skipIfCustomUser
class InActiveUserBackendTest(TestCase):
    """
    Tests for an inactive user.
    """
    backend = 'django.contrib.auth.tests.auth_backends.SimpleRowlevelBackend'

    def setUp(self):
        self.curr_auth = settings.AUTHENTICATION_BACKENDS
        settings.AUTHENTICATION_BACKENDS = (self.backend,)
        self.user1 = User.objects.create_user('test', 'test@example.com', 'test')
        self.user1.is_active = False
        self.user1.save()

    def tearDown(self):
        settings.AUTHENTICATION_BACKENDS = self.curr_auth

    def test_has_perm(self):
        self.assertEqual(self.user1.has_perm('perm', TestObj()), False)
        self.assertEqual(self.user1.has_perm('inactive', TestObj()), True)

    def test_has_module_perms(self):
        self.assertEqual(self.user1.has_module_perms("app1"), False)
        self.assertEqual(self.user1.has_module_perms("app2"), False)


class PermissionDeniedBackend(object):
    """
    Always raises PermissionDenied.
    """
    supports_object_permissions = True
    supports_anonymous_user = True
    supports_inactive_user = True

    def authenticate(self, username=None, password=None):
        raise PermissionDenied


@skipIfCustomUser
class PermissionDeniedBackendTest(TestCase):
    """
    Tests that other backends are not checked once a backend raises
    PermissionDenied.
    """
    backend = 'django.contrib.auth.tests.auth_backends.PermissionDeniedBackend'

    def setUp(self):
        self.user1 = User.objects.create_user('test', 'test@example.com', 'test')
        self.user1.save()

    @override_settings(AUTHENTICATION_BACKENDS=(backend, ) +
                       tuple(settings.AUTHENTICATION_BACKENDS))
    def test_permission_denied(self):
        "User is not authenticated after a backend raises PermissionDenied (#2550)"
        self.assertEqual(authenticate(username='test', password='test'), None)

    @override_settings(AUTHENTICATION_BACKENDS=tuple(
            settings.AUTHENTICATION_BACKENDS) + (backend, ))
    def test_authenticates(self):
        self.assertEqual(authenticate(username='test', password='test'), self.user1)
mammique/django
django/contrib/auth/tests/auth_backends.py
Python
bsd-3-clause
15,671
0.002106
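The test suite above exercises Django's pluggable authentication contract. As a minimal sketch of that contract (all names here are illustrative, it assumes the default django.contrib.auth User model, and it must run inside a configured Django project), a backend of this era only needs authenticate() and get_user():

from django.contrib.auth.models import User

class SketchBackend(object):
    def authenticate(self, username=None, password=None):
        # Return a User on success, None to let the next backend try.
        try:
            user = User.objects.get(username=username)
        except User.DoesNotExist:
            return None
        return user if user.check_password(password) else None

    def get_user(self, user_id):
        # Called on every request to rehydrate request.user from the session.
        try:
            return User.objects.get(pk=user_id)
        except User.DoesNotExist:
            return None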
# Download the Python helper library from twilio.com/docs/python/install
from twilio.rest import Client

# Your Account Sid and Auth Token from twilio.com/user/account
account_sid = "ACCOUNT_SID"
auth_token = "your_auth_token"
client = Client(account_sid, auth_token)

number = client.lookups.phone_numbers("+16502530000").fetch(
    type="caller-name",
)

# A "caller-name" lookup populates the caller_name dict; the carrier
# dict stays empty unless type="carrier" is also requested.
print(number.caller_name['caller_name'])
print(number.caller_name['caller_type'])
teoreteetik/api-snippets
lookups/lookup-get-cname-example-1/lookup-get-cname-example-1.6.x.py
Python
mit
417
0
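For comparison, the same Lookup endpoint can return carrier data instead of caller name. This hedged variant assumes the same twilio-python 6.x client shown above, with placeholder credentials:

from twilio.rest import Client

client = Client("ACCOUNT_SID", "your_auth_token")  # placeholder credentials
number = client.lookups.phone_numbers("+16502530000").fetch(
    type="carrier",
)
print(number.carrier['type'])  # e.g. "mobile"
print(number.carrier['name'])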
from chainer.iterators import multiprocess_iterator from chainer.iterators import serial_iterator MultiprocessIterator = multiprocess_iterator.MultiprocessIterator SerialIterator = serial_iterator.SerialIterator
kikusu/chainer
chainer/iterators/__init__.py
Python
mit
214
0
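The module above only re-exports the two iterator classes. A short usage sketch (any indexable sequence works as a dataset; with repeat=False the iterator stops after one epoch, yielding a final partial batch):

from chainer.iterators import SerialIterator

dataset = list(range(10))
it = SerialIterator(dataset, batch_size=4, repeat=False, shuffle=False)
for batch in it:
    print(batch)  # [0, 1, 2, 3], then [4, 5, 6, 7], then [8, 9]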
# -*- coding: utf-8 -*- # Generated by Django 1.10.2 on 2017-01-03 10:30 from __future__ import unicode_literals from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('main', '0026_auto_20161215_2204'), ] operations = [ migrations.AddField( model_name='lan', name='show_calendar', field=models.BooleanField(default=False, help_text='Hvorvidt en kalender skal vises på forsiden. Slå kun dette til hvis turneringer og andre events efterhånden er ved at være klar.', verbose_name='Vis kalender'), ), migrations.AddField( model_name='tournament', name='end', field=models.DateTimeField(null=True, verbose_name='Slut'), ), migrations.AddField( model_name='tournament', name='start', field=models.DateTimeField(null=True, verbose_name='Start'), ), ]
bomjacob/htxaarhuslan
main/migrations/0027_auto_20170103_1130.py
Python
mit
977
0.001028
# Copyright (C) 2016 Deloitte Argentina. # This file is part of CodexGigas - https://github.com/codexgigassys/ # See the file 'LICENSE' for copying permission. from PlugIns.PlugIn import PlugIn class CypherPlug(PlugIn): def __init__(self, sample=None): PlugIn.__init__(self, sample) def getPath(self): return "particular_header.cypher" def getName(self): return "cypher" def getVersion(self): return 1 def process(self): return "Not_implemented"
codexgigassys/codex-backend
src/PlugIns/PE/CypherPlug.py
Python
mit
513
0
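The plugin above is a stub: process() just returns "Not_implemented". The PlugIn base API beyond what is shown is not visible here, so the following is only a hypothetical Python 3 helper that such a process() might call, not the project's actual logic:

import math
from collections import Counter

def shannon_entropy(data):
    """Bits per byte of `data`; values near 8.0 suggest packed or encrypted content."""
    if not data:
        return 0.0
    n = len(data)
    return -sum(c / n * math.log2(c / n) for c in Counter(data).values())

print(shannon_entropy(b"hello"))           # ~1.92
print(shannon_entropy(bytes(range(256))))  # 8.0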
# Copyright (c) Twisted Matrix Laboratories. # See LICENSE for details. """ Windows constants for IOCP """ # this stuff should really be gotten from Windows headers via pyrex, but it # probably is not going to change ERROR_PORT_UNREACHABLE = 1234 ERROR_NETWORK_UNREACHABLE = 1231 ERROR_CONNECTION_REFUSED = 1225 ERROR_IO_PENDING = 997 ERROR_OPERATION_ABORTED = 995 WAIT_TIMEOUT = 258 ERROR_NETNAME_DELETED = 64 ERROR_HANDLE_EOF = 38 INFINITE = -1 SO_UPDATE_CONNECT_CONTEXT = 0x7010 SO_UPDATE_ACCEPT_CONTEXT = 0x700B
hlzz/dotfiles
graphics/VTK-7.0.0/ThirdParty/Twisted/twisted/internet/iocpreactor/const.py
Python
bsd-3-clause
550
0.001818
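These constants are hard-coded Windows error numbers. On a Windows host, their standard message strings can be spot-checked from Python without any header parsing (illustrative only; ctypes.FormatError exists only on Windows):

import ctypes  # Windows-only check

for code in (995, 997, 1225, 1231, 1234):
    print(code, ctypes.FormatError(code))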
from sklearn2sql_heroku.tests.regression import generic as reg_gen reg_gen.test_model("LGBMRegressor" , "boston" , "postgresql")
antoinecarme/sklearn2sql_heroku
tests/regression/boston/ws_boston_LGBMRegressor_postgresql_code_gen.py
Python
bsd-3-clause
131
0.015267
# run scripts/jobslave-nodatabase.py import os os.environ["SEAMLESS_COMMUNION_ID"] = "simple-remote" os.environ["SEAMLESS_COMMUNION_INCOMING"] = "localhost:8602" import seamless seamless.set_ncores(0) from seamless import communion_server communion_server.configure_master( buffer=True, transformation_job=True, transformation_status=True, ) from seamless.core import context, cell, transformer, unilink ctx = context(toplevel=True) ctx.cell1 = cell().set(1) ctx.cell2 = cell().set(2) ctx.result = cell() ctx.tf = transformer({ "a": "input", "b": "input", "c": "output" }) ctx.cell1_unilink = unilink(ctx.cell1) ctx.cell1_unilink.connect(ctx.tf.a) ctx.cell2.connect(ctx.tf.b) ctx.code = cell("transformer").set("c = a + b") ctx.code.connect(ctx.tf.code) ctx.result_unilink = unilink(ctx.result) ctx.tf.c.connect(ctx.result_unilink) ctx.result_copy = cell() ctx.result.connect(ctx.result_copy) ctx.compute(0.1) print(ctx.cell1.value) print(ctx.code.value) ctx.compute() print(ctx.result.value, ctx.status) print(ctx.tf.exception) ctx.cell1.set(10) ctx.compute() print(ctx.result.value, ctx.status) ctx.code.set("c = a + b + 1000") ctx.compute() print(ctx.result.value, ctx.status) print("Introduce delay...") ctx.code.set("import time; time.sleep(2); c = -(a + b)") ctx.compute(1.0) print("after 1.0 sec...") print(ctx.result.value, ctx.status) print("...") ctx.compute() print(ctx.result.value, ctx.status)
sjdv1982/seamless
tests/lowlevel/simple-remote.py
Python
mit
1,438
0.002086
import pkg_resources from string import Template model_template = Template(pkg_resources.resource_string(__name__, "model_template.C")) lorentz_calc_template = Template(pkg_resources.resource_string(__name__, "lorentz_calc_template.C")) sconstruct_template = Template(pkg_resources.resource_string(__name__, "sconstruct_template")) run_card_template = Template(pkg_resources.resource_string(__name__, "run_card_template"))
cms-externals/sherpa
MODEL/UFO/templates.py
Python
gpl-3.0
427
0.009368
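The module above just loads stdlib string.Template objects out of packaged files. A consumption sketch with hypothetical placeholder names (the real placeholders live inside the template files themselves):

from string import Template

t = Template("Model $model_name defines $n_params external parameters")
# safe_substitute leaves any unknown $placeholders intact instead of raising
print(t.safe_substitute(model_name="SM", n_params=19))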
"""Clean an input BAM file to work with downstream pipelines. GATK and Picard based pipelines have specific requirements for chromosome order, run group information and other BAM formatting. This provides a pipeline to prepare and resort an input. """ import os import sys import pysam from bcbio import bam, broad, utils from bcbio.bam import ref from bcbio.distributed.transaction import file_transaction, tx_tmpdir from bcbio.heterogeneity import chromhacks from bcbio.ngsalign import novoalign from bcbio.pipeline import datadict as dd from bcbio.provenance import do def fixrg(in_bam, names, ref_file, dirs, data): """Fix read group in a file, using samtools addreplacerg. addreplacerg does not remove the old read group, causing confusion when checking. We use reheader to work around this """ work_dir = utils.safe_makedir(os.path.join(dd.get_work_dir(data), "bamclean", dd.get_sample_name(data))) out_file = os.path.join(work_dir, "%s-fixrg.bam" % utils.splitext_plus(os.path.basename(in_bam))[0]) if not utils.file_exists(out_file): out_file = os.path.join(work_dir, "%s-fixrg.bam" % dd.get_sample_name(data)) if not utils.file_uptodate(out_file, in_bam): with file_transaction(data, out_file) as tx_out_file: rg_info = novoalign.get_rg_info(names) new_header = "%s-header.txt" % os.path.splitext(out_file)[0] cores = dd.get_cores(data) do.run("samtools view -H {in_bam} | grep -v ^@RG > {new_header}".format(**locals()), "Create empty RG header: %s" % dd.get_sample_name(data)) cmd = ("samtools reheader {new_header} {in_bam} | " "samtools addreplacerg -@ {cores} -r '{rg_info}' -m overwrite_all -O bam -o {tx_out_file} -") do.run(cmd.format(**locals()), "Fix read groups: %s" % dd.get_sample_name(data)) return out_file def remove_extracontigs(in_bam, data): """Remove extra contigs (non chr1-22,X,Y) from an input BAM. These extra contigs can often be arranged in different ways, causing incompatibility issues with GATK and other tools. This also fixes the read group header as in fixrg. This does not yet handle mapping over 1 -> chr1 issues since this requires a ton of search/replace which slows down conversion. """ work_dir = utils.safe_makedir(os.path.join(dd.get_work_dir(data), "bamclean", dd.get_sample_name(data))) out_file = os.path.join(work_dir, "%s-noextras.bam" % utils.splitext_plus(os.path.basename(in_bam))[0]) if not utils.file_exists(out_file): out_file = os.path.join(work_dir, "%s-noextras.bam" % dd.get_sample_name(data)) if not utils.file_uptodate(out_file, in_bam): with file_transaction(data, out_file) as tx_out_file: target_chroms = _target_chroms_and_header(in_bam, data) str_chroms = " ".join(target_chroms) rg_info = novoalign.get_rg_info(data["rgnames"]) bcbio_py = sys.executable ref_file = dd.get_ref_file(data) local_bam = os.path.join(os.path.dirname(tx_out_file), os.path.basename(in_bam)) cores = dd.get_cores(data) utils.symlink_plus(in_bam, local_bam) bam.index(local_bam, data["config"]) cmd = ("samtools view -@ {cores} -h {local_bam} {str_chroms} | " """{bcbio_py} -c 'from bcbio.pipeline import cleanbam; """ """cleanbam.fix_header("{ref_file}")' | """ "samtools view -@ {cores} -u - | " "samtools addreplacerg -@ {cores} -r '{rg_info}' -m overwrite_all -O bam -o {tx_out_file} - ") do.run(cmd.format(**locals()), "bamprep, remove extra contigs: %s" % dd.get_sample_name(data)) return out_file def _target_chroms_and_header(bam_file, data): """Get a list of chromosomes to target and new updated ref_file header. 
Could potentially handle remapping from chr1 -> 1 but currently disabled due to speed issues. """ special_remaps = {"chrM": "MT", "MT": "chrM"} target_chroms = dict([(x.name, i) for i, x in enumerate(ref.file_contigs(dd.get_ref_file(data))) if chromhacks.is_autosomal_or_sex(x.name)]) out_chroms = [] with pysam.Samfile(bam_file, "rb") as bamfile: for bami, bam_contig in enumerate([c["SN"] for c in bamfile.header["SQ"]]): if bam_contig in target_chroms: target_chrom = bam_contig elif bam_contig in special_remaps and special_remaps[bam_contig] in target_chroms: target_chrom = special_remaps[bam_contig] elif bam_contig.startswith("chr") and bam_contig.replace("chr", "") in target_chroms: target_chrom = bam_contig.replace("chr", "") elif "chr%s" % bam_contig in target_chroms: target_chrom = "chr%s" % bam_contig else: target_chrom = None # target_chrom == bam_contig ensures we don't try chr1 -> 1 style remapping if target_chrom and target_chrom == bam_contig: # Order not required if dealing with SAM file header fixing #assert bami == target_chroms[target_chrom], \ # ("remove_extracontigs: Non-matching order of standard contig: %s %s (%s vs %s)" % # (bam_file, target_chrom, bami, target_chroms[target_chrom])) out_chroms.append(target_chrom) assert out_chroms, ("remove_extracontigs: Did not find any chromosomes in reference file: %s %s" % (bam_file, target_chroms)) return out_chroms def fix_header(ref_file): added_ref = False for line in sys.stdin: # skip current read groups, since adding new # skip current contigs since adding new sequence dictionary if line.startswith(("@RG", "@SQ")): pass elif not added_ref and not line.startswith("@"): for x in ref.file_contigs(ref_file): sys.stdout.write("@SQ\tSN:%s\tLN:%s\n" % (x.name, x.size)) added_ref = True else: sys.stdout.write(line) def picard_prep(in_bam, names, ref_file, dirs, data): """Prepare input BAM using Picard and GATK cleaning tools. - ReorderSam to reorder file to reference - AddOrReplaceReadGroups to add read group information and coordinate sort - PrintReads to filters to remove problem records: - filterMBQ to remove reads with mismatching bases and base qualities """ runner = broad.runner_from_path("picard", data["config"]) work_dir = utils.safe_makedir(os.path.join(dirs["work"], "bamclean", names["sample"])) runner.run_fn("picard_index_ref", ref_file) reorder_bam = os.path.join(work_dir, "%s-reorder.bam" % os.path.splitext(os.path.basename(in_bam))[0]) if not utils.file_exists(reorder_bam): reorder_bam = os.path.join(work_dir, "%s-reorder.bam" % dd.get_sample_name(data)) reorder_bam = runner.run_fn("picard_reorder", in_bam, ref_file, reorder_bam) rg_bam = runner.run_fn("picard_fix_rgs", reorder_bam, names) return _filter_bad_reads(rg_bam, ref_file, data) def _filter_bad_reads(in_bam, ref_file, data): """Use GATK filter to remove problem reads which choke GATK and Picard. 
""" bam.index(in_bam, data["config"]) out_file = "%s-gatkfilter.bam" % os.path.splitext(in_bam)[0] if not utils.file_exists(out_file): with tx_tmpdir(data) as tmp_dir: with file_transaction(data, out_file) as tx_out_file: params = [("FixMisencodedBaseQualityReads" if dd.get_quality_format(data, "").lower() == "illumina" else "PrintReads"), "-R", ref_file, "-I", in_bam, "-O", tx_out_file, "-RF", "MatchingBasesAndQualsReadFilter", "-RF", "SeqIsStoredReadFilter", "-RF", "CigarContainsNoNOperator"] jvm_opts = broad.get_gatk_opts(data["config"], tmp_dir) do.run(broad.gatk_cmd("gatk", jvm_opts, params), "Filter problem reads") bam.index(out_file, data["config"]) return out_file
a113n/bcbio-nextgen
bcbio/pipeline/cleanbam.py
Python
mit
8,337
0.003958
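The fix_header() routine above rebuilds @SQ header lines from the reference contigs. The same idea as a standalone sketch against an indexed FASTA (the path is a placeholder, and pysam.FastaFile needs the companion .fai index to exist):

import pysam

fa = pysam.FastaFile("/path/to/genome.fa")  # placeholder path
for name, length in zip(fa.references, fa.lengths):
    print("@SQ\tSN:%s\tLN:%d" % (name, length))
fa.close()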
# -*- coding: utf-8 -*-
"""
pgp_import command

Import keys and signatures from a given GPG keyring.

Usage: ./manage.py pgp_import <keyring_path>
"""

from collections import namedtuple, OrderedDict
from datetime import datetime
import logging
from pytz import utc
import subprocess
import sys

from django.core.management.base import BaseCommand, CommandError
from django.db import transaction

from devel.models import DeveloperKey, PGPSignature
from devel.utils import UserFinder


logging.basicConfig(
    level=logging.INFO,
    format='%(asctime)s -> %(levelname)s: %(message)s',
    datefmt='%Y-%m-%d %H:%M:%S',
    stream=sys.stderr)
logger = logging.getLogger()


class Command(BaseCommand):
    args = "<keyring_path>"
    help = "Import keys and signatures from a given GPG keyring."

    def handle(self, *args, **options):
        v = int(options.get('verbosity', None))
        if v == 0:
            logger.level = logging.ERROR
        elif v == 1:
            logger.level = logging.INFO
        elif v >= 2:
            logger.level = logging.DEBUG

        if len(args) < 1:
            raise CommandError("keyring_path must be provided")

        import_keys(args[0])
        import_signatures(args[0])


def get_date(epoch_string):
    '''Convert an epoch string into a python 'date' object (not datetime).'''
    if not epoch_string:
        return None
    return datetime.utcfromtimestamp(int(epoch_string)).date()


def get_datetime(epoch_string):
    '''Convert an epoch string into a python 'datetime' object.'''
    if not epoch_string:
        return None
    return datetime.utcfromtimestamp(int(epoch_string)).replace(tzinfo=utc)


def call_gpg(keyring, *args):
    # GPG is stupid and interprets any filename without a path portion as
    # being in ~/.gnupg/. Fake it out if we just get a bare filename.
    if '/' not in keyring:
        keyring = './%s' % keyring
    gpg_cmd = ["gpg2", "--no-default-keyring", "--keyring", keyring,
            "--with-colons", "--fixed-list-mode"]
    gpg_cmd.extend(args)
    logger.info("running command: %s", ' '.join(gpg_cmd))
    proc = subprocess.Popen(gpg_cmd, stdout=subprocess.PIPE)
    outdata, errdata = proc.communicate()
    if proc.returncode != 0:
        logger.error(errdata)
        raise subprocess.CalledProcessError(proc.returncode, gpg_cmd)
    return outdata


class KeyData(object):
    def __init__(self, key, created, expires):
        self.key = key
        self.created = get_datetime(created)
        self.expires = get_datetime(expires)
        self.parent = None
        self.revoked = None
        self.db_id = None


def parse_keydata(data):
    keys = OrderedDict()
    current_pubkey = None

    # parse all of the output from our successful GPG command
    logger.info("parsing command output")
    node = None
    for line in data.split('\n'):
        parts = line.split(':')
        if parts[0] == 'pub':
            key = parts[4]
            current_pubkey = key
            keys[key] = KeyData(key, parts[5], parts[6])
            node = parts[0]
        elif parts[0] == 'sub':
            key = parts[4]
            keys[key] = KeyData(key, parts[5], parts[6])
            keys[key].parent = current_pubkey
            node = parts[0]
        elif parts[0] == 'uid':
            node = parts[0]
        elif parts[0] == 'rev' and node in ('pub', 'sub'):
            keys[current_pubkey].revoked = get_datetime(parts[5])

    return keys


def find_key_owner(key, keys, finder):
    '''Recurse up the chain, looking for an owner.'''
    if key is None:
        return None
    owner = finder.find_by_pgp_key(key.key)
    if owner:
        return owner
    if key.parent:
        return find_key_owner(keys[key.parent], keys, finder)
    return None


def import_keys(keyring):
    outdata = call_gpg(keyring, "--list-sigs")
    keydata = parse_keydata(outdata)

    logger.info("creating or finding %d keys", len(keydata))
    created_ct = updated_ct = 0
    with transaction.atomic():
        finder = UserFinder()
        # we are dependent on parents coming before children; parse_keydata
        # uses an OrderedDict to ensure this is the case.
for data in keydata.values(): parent_id = None if data.parent: parent_data = keydata.get(data.parent, None) if parent_data: parent_id = parent_data.db_id other = { 'expires': data.expires, 'revoked': data.revoked, 'parent_id': parent_id, } dkey, created = DeveloperKey.objects.get_or_create( key=data.key, created=data.created, defaults=other) data.db_id = dkey.id # set or update any additional data we might need to needs_save = False if created: created_ct += 1 else: for k, v in other.items(): if getattr(dkey, k) != v: setattr(dkey, k, v) needs_save = True if dkey.owner_id is None: owner = find_key_owner(data, keydata, finder) if owner is not None: dkey.owner = owner needs_save = True if needs_save: dkey.save() updated_ct += 1 key_ct = DeveloperKey.objects.all().count() logger.info("%d total keys in database", key_ct) logger.info("created %d, updated %d keys", created_ct, updated_ct) class SignatureData(object): def __init__(self, signer, signee, created): self.signer = signer self.signee = signee self.created = created self.expires = None self.revoked = None def parse_sigdata(data): nodes = {} edges = [] current_pubkey = None # parse all of the output from our successful GPG command logger.info("parsing command output") for line in data.split('\n'): parts = line.split(':') if parts[0] == 'pub': current_pubkey = parts[4] nodes[current_pubkey] = None elif parts[0] == 'uid': uid = parts[9] # only set uid if this is the first one encountered if nodes[current_pubkey] is None: nodes[current_pubkey] = uid elif parts[0] == 'sig': signer = parts[4] created = get_date(parts[5]) edge = SignatureData(signer, current_pubkey, created) if parts[6]: edge.expires = get_date(parts[6]) edges.append(edge) elif parts[0] == 'rev': signer = parts[4] revoked = get_date(parts[5]) # revoke any prior edges that match matches = [e for e in edges if e.signer == signer and e.signee == current_pubkey] for edge in matches: edge.revoked = revoked return nodes, edges def import_signatures(keyring): outdata = call_gpg(keyring, "--list-sigs") nodes, edges = parse_sigdata(outdata) # now prune the data down to what we actually want. # prune edges not in nodes, remove duplicates, and self-sigs pruned_edges = {edge for edge in edges if edge.signer in nodes and edge.signer != edge.signee} logger.info("creating or finding up to %d signatures", len(pruned_edges)) created_ct = updated_ct = 0 with transaction.atomic(): for edge in pruned_edges: sig, created = PGPSignature.objects.get_or_create( signer=edge.signer, signee=edge.signee, created=edge.created, expires=edge.expires, defaults={ 'revoked': edge.revoked }) if sig.revoked != edge.revoked: sig.revoked = edge.revoked sig.save() updated_ct += 1 if created: created_ct += 1 sig_ct = PGPSignature.objects.all().count() logger.info("%d total signatures in database", sig_ct) logger.info("created %d, updated %d signatures", created_ct, updated_ct) # vim: set ts=4 sw=4 et:
brain0/archweb
devel/management/commands/pgp_import.py
Python
gpl-2.0
8,172
0.000734
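parse_keydata() and parse_sigdata() above walk GPG's machine-readable --with-colons records. A self-contained illustration with fabricated sample lines (after splitting on ':', parts[4] is the key id and parts[5]/parts[6] the created/expires epochs, matching the indices used above):

sample = "\n".join([
    "pub:u:4096:1:AAAABBBBCCCCDDDD:1262304000:1893456000::u:::scSC:",
    "uid:u::::1262304000::0123456789ABCDEF::Example Dev <dev@example.com>:",
    "sub:u:4096:1:1111222233334444:1262304000::::::e:",
])
for line in sample.split("\n"):
    parts = line.split(":")
    if parts[0] == "pub":
        print("primary key %s created %s" % (parts[4], parts[5]))
    elif parts[0] == "sub":
        print("  subkey %s" % parts[4])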
""" Qxf2 Services: A plug-n-play class for logging. This class wraps around Python's loguru module. """ import os, inspect import pytest,logging from loguru import logger from pytest_reportportal import RPLogger, RPLogHandler class Base_Logging(): "A plug-n-play class for logging" def __init__(self,log_file_name=None,level="DEBUG",format="{time:YYYY-MM-DD HH:mm:ss} | {level} | {module} | {message}"): "Constructor for the logging class" self.log_file_name=log_file_name self.log_file_dir = os.path.abspath(os.path.join(os.path.dirname(__file__),'..','log')) self.level=level self.format=format self.log = self.set_log(self.log_file_name,self.level,self.format) self.rp_logger = None def set_log(self,log_file_name,level,format,test_module_name=None): "Add an handler sending log messages to a sink" if test_module_name is None: test_module_name = self.get_calling_module() if not os.path.exists(self.log_file_dir): os.makedirs(self.log_file_dir) if log_file_name is None: log_file_name = self.log_file_dir + os.sep + test_module_name + '.log' else: log_file_name = self.log_file_dir + os.sep + log_file_name logger.add(log_file_name,level=level,format=format, rotation="30 days", filter=None, colorize=None, serialize=False, backtrace=True, enqueue=False, catch=True) def get_calling_module(self): "Get the name of the calling module" calling_file = inspect.stack()[-1][1] if 'runpy' in calling_file: calling_file = inspect.stack()[4][1] calling_filename = calling_file.split(os.sep) #This logic bought to you by windows + cygwin + git bash if len(calling_filename) == 1: #Needed for calling_filename = calling_file.split('/') self.calling_module = calling_filename[-1].split('.')[0] return self.calling_module def setup_rp_logging(self, rp_pytest_service): "Setup reportportal logging" try: # Setting up a logging. logging.setLoggerClass(RPLogger) self.rp_logger = logging.getLogger(__name__) self.rp_logger.setLevel(logging.INFO) # Create handler for Report Portal. rp_handler = RPLogHandler(rp_pytest_service) # Set INFO level for Report Portal handler. rp_handler.setLevel(logging.INFO) return self.rp_logger except Exception as e: self.write("Exception when trying to set rplogger") self.write(str(e)) self.exceptions.append("Error when setting up the reportportal logger") def write(self,msg,level='info'): "Write out a message" #fname = inspect.stack()[2][3] #May be use a entry-exit decorator instead all_stack_frames = inspect.stack() for stack_frame in all_stack_frames[1:]: if 'Base_Page' not in stack_frame[1]: break fname = stack_frame[3] d = {'caller_func': fname} if self.rp_logger: if level.lower()== 'debug': self.rp_logger.debug(msg=msg) elif level.lower()== 'info': self.rp_logger.info(msg) elif level.lower()== 'warn' or level.lower()=='warning': self.rp_logger.warning(msg) elif level.lower()== 'error': self.rp_logger.error(msg) elif level.lower()== 'critical': self.rp_logger.critical(msg) else: self.rp_logger.critical(msg) return if level.lower()== 'debug': logger.debug("{module} | {msg}",module=d['caller_func'],msg=msg) elif level.lower()== 'info': logger.info("{module} | {msg}",module=d['caller_func'],msg=msg) elif level.lower()== 'warn' or level.lower()=='warning': logger.warning("{module} | {msg}",module=d['caller_func'],msg=msg) elif level.lower()== 'error': logger.error("{module} | {msg}",module=d['caller_func'],msg=msg) elif level.lower()== 'critical': logger.critical("{module} | {msg}",module=d['caller_func'],msg=msg) else: logger.critical("Unknown level passed for the msg: {}", msg)
qxf2/qxf2-page-object-model
utils/Base_Logging.py
Python
mit
4,369
0.013733
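Stripped of the class plumbing, set_log() above reduces to a single loguru sink registration. A standalone equivalent (the file path is a placeholder; loguru creates missing parent directories for file sinks itself):

from loguru import logger

logger.add("log/example.log",
           level="DEBUG",
           format="{time:YYYY-MM-DD HH:mm:ss} | {level} | {module} | {message}",
           rotation="30 days")
logger.info("sink configured")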
"""mysite URL Configuration The `urlpatterns` list routes URLs to views. For more information please see: https://docs.djangoproject.com/en/1.11/topics/http/urls/ Examples: Function views 1. Add an import: from my_app import views 2. Add a URL to urlpatterns: url(r'^$', views.home, name='home') Class-based views 1. Add an import: from other_app.views import Home 2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home') Including another URLconf 1. Import the include() function: from django.conf.urls import url, include 2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls')) """ from django.conf.urls import url, include from django.contrib import admin urlpatterns = [ url(r'^admin/', admin.site.urls), url(r'', include('almoxarifado.urls')), ]
rvmoura96/projeto-almoxarifado
mysite/urls.py
Python
mit
838
0
# Generated by Django 2.1.3 on 2018-11-09 18:28 from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('proposals', '0016_auto_20180405_2116'), ] operations = [ migrations.AlterField( model_name='timeallocation', name='instrument_name', field=models.CharField(choices=[('0M4-SCICAM-SBIG', '0M4-SCICAM-SBIG'), ('0M8-NRES-SCICAM', '0M8-NRES-SCICAM'), ('0M8-SCICAM-SBIG', '0M8-SCICAM-SBIG'), ('1M0-NRES-SCICAM', '1M0-NRES-SCICAM'), ('1M0-SCICAM-SINISTRO', '1M0-SCICAM-SINISTRO'), ('1M0-SCICAM-SBIG', '1M0-SCICAM-SBIG'), ('1M0-NRES-COMMISSIONING', '1M0-NRES-COMMISSIONING'), ('2M0-FLOYDS-SCICAM', '2M0-FLOYDS-SCICAM'), ('2M0-SCICAM-SPECTRAL', '2M0-SCICAM-SPECTRAL'), ('2M0-SCICAM-SBIG', '2M0-SCICAM-SBIG')], max_length=200), ), migrations.AlterField( model_name='timeallocation', name='telescope_class', field=models.CharField(choices=[('0m4', '0m4'), ('0m8', '0m8'), ('1m0', '1m0'), ('2m0', '2m0')], max_length=20), ), ]
LCOGT/valhalla
valhalla/proposals/migrations/0017_auto_20181109_1828.py
Python
gpl-3.0
1,094
0.001828
"""Support for Transport NSW (AU) to query next leave event.""" from datetime import timedelta from TransportNSW import TransportNSW import voluptuous as vol from homeassistant.components.sensor import PLATFORM_SCHEMA, SensorEntity from homeassistant.const import ( ATTR_ATTRIBUTION, ATTR_MODE, CONF_API_KEY, CONF_NAME, TIME_MINUTES, ) import homeassistant.helpers.config_validation as cv ATTR_STOP_ID = "stop_id" ATTR_ROUTE = "route" ATTR_DUE_IN = "due" ATTR_DELAY = "delay" ATTR_REAL_TIME = "real_time" ATTR_DESTINATION = "destination" ATTRIBUTION = "Data provided by Transport NSW" CONF_STOP_ID = "stop_id" CONF_ROUTE = "route" CONF_DESTINATION = "destination" DEFAULT_NAME = "Next Bus" ICONS = { "Train": "mdi:train", "Lightrail": "mdi:tram", "Bus": "mdi:bus", "Coach": "mdi:bus", "Ferry": "mdi:ferry", "Schoolbus": "mdi:bus", "n/a": "mdi:clock", None: "mdi:clock", } SCAN_INTERVAL = timedelta(seconds=60) PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( { vol.Required(CONF_STOP_ID): cv.string, vol.Required(CONF_API_KEY): cv.string, vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string, vol.Optional(CONF_ROUTE, default=""): cv.string, vol.Optional(CONF_DESTINATION, default=""): cv.string, } ) def setup_platform(hass, config, add_entities, discovery_info=None): """Set up the Transport NSW sensor.""" stop_id = config[CONF_STOP_ID] api_key = config[CONF_API_KEY] route = config.get(CONF_ROUTE) destination = config.get(CONF_DESTINATION) name = config.get(CONF_NAME) data = PublicTransportData(stop_id, route, destination, api_key) add_entities([TransportNSWSensor(data, stop_id, name)], True) class TransportNSWSensor(SensorEntity): """Implementation of an Transport NSW sensor.""" def __init__(self, data, stop_id, name): """Initialize the sensor.""" self.data = data self._name = name self._stop_id = stop_id self._times = self._state = None self._icon = ICONS[None] @property def name(self): """Return the name of the sensor.""" return self._name @property def native_value(self): """Return the state of the sensor.""" return self._state @property def extra_state_attributes(self): """Return the state attributes.""" if self._times is not None: return { ATTR_DUE_IN: self._times[ATTR_DUE_IN], ATTR_STOP_ID: self._stop_id, ATTR_ROUTE: self._times[ATTR_ROUTE], ATTR_DELAY: self._times[ATTR_DELAY], ATTR_REAL_TIME: self._times[ATTR_REAL_TIME], ATTR_DESTINATION: self._times[ATTR_DESTINATION], ATTR_MODE: self._times[ATTR_MODE], ATTR_ATTRIBUTION: ATTRIBUTION, } @property def native_unit_of_measurement(self): """Return the unit this state is expressed in.""" return TIME_MINUTES @property def icon(self): """Icon to use in the frontend, if any.""" return self._icon def update(self): """Get the latest data from Transport NSW and update the states.""" self.data.update() self._times = self.data.info self._state = self._times[ATTR_DUE_IN] self._icon = ICONS[self._times[ATTR_MODE]] class PublicTransportData: """The Class for handling the data retrieval.""" def __init__(self, stop_id, route, destination, api_key): """Initialize the data object.""" self._stop_id = stop_id self._route = route self._destination = destination self._api_key = api_key self.info = { ATTR_ROUTE: self._route, ATTR_DUE_IN: "n/a", ATTR_DELAY: "n/a", ATTR_REAL_TIME: "n/a", ATTR_DESTINATION: "n/a", ATTR_MODE: None, } self.tnsw = TransportNSW() def update(self): """Get the next leave time.""" _data = self.tnsw.get_departures( self._stop_id, self._route, self._destination, self._api_key ) self.info = { ATTR_ROUTE: _data["route"], ATTR_DUE_IN: _data["due"], ATTR_DELAY: 
_data["delay"], ATTR_REAL_TIME: _data["real_time"], ATTR_DESTINATION: _data["destination"], ATTR_MODE: _data["mode"], }
lukas-hetzenecker/home-assistant
homeassistant/components/transport_nsw/sensor.py
Python
apache-2.0
4,474
0
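The PLATFORM_SCHEMA above is a voluptuous schema. How such a schema validates input and fills in defaults, reduced to a self-contained check with illustrative keys and values:

import voluptuous as vol

schema = vol.Schema({
    vol.Required("stop_id"): str,
    vol.Required("api_key"): str,
    vol.Optional("route", default=""): str,
})
print(schema({"stop_id": "209516", "api_key": "XXXX"}))
# {'stop_id': '209516', 'api_key': 'XXXX', 'route': ''}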
# Copyright (c) 2019 Guangwang Huang # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. import pytest import libqtile.config from libqtile import layout from libqtile.confreader import Config from test.layouts.layout_utils import assert_focus_path, assert_focused class TreeTabConfig(Config): auto_fullscreen = True groups = [ libqtile.config.Group("a"), libqtile.config.Group("b"), libqtile.config.Group("c"), libqtile.config.Group("d"), ] layouts = [ layout.TreeTab(sections=["Foo", "Bar"]), ] floating_layout = libqtile.resources.default_config.floating_layout keys = [] mouse = [] screens = [] follow_mouse_focus = False treetab_config = pytest.mark.parametrize("manager", [TreeTabConfig], indirect=True) @treetab_config def test_window(manager): # setup 3 tiled and two floating clients manager.test_window("one") manager.test_window("two") manager.test_window("float1", floating=True) manager.test_window("float2", floating=True) manager.test_window("three") # test preconditions, columns adds clients at pos of current, in two stacks assert manager.c.layout.info()["clients"] == ["one", "three", "two"] assert manager.c.layout.info()["sections"] == ["Foo", "Bar"] assert manager.c.layout.info()["client_trees"] == { "Foo": [["one"], ["two"], ["three"]], "Bar": [], } # last added window has focus assert_focused(manager, "three") manager.c.layout.up() assert_focused(manager, "two") manager.c.layout.down() assert_focused(manager, "three") # test command move_up/down manager.c.layout.move_up() assert manager.c.layout.info()["clients"] == ["one", "three", "two"] assert manager.c.layout.info()["client_trees"] == { "Foo": [["one"], ["three"], ["two"]], "Bar": [], } manager.c.layout.move_down() assert manager.c.layout.info()["client_trees"] == { "Foo": [["one"], ["two"], ["three"]], "Bar": [], } # section_down/up manager.c.layout.up() # focus two manager.c.layout.section_down() assert manager.c.layout.info()["client_trees"] == { "Foo": [["one"], ["three"]], "Bar": [["two"]], } manager.c.layout.section_up() assert manager.c.layout.info()["client_trees"] == { "Foo": [["one"], ["three"], ["two"]], "Bar": [], } # del_section manager.c.layout.up() # focus three manager.c.layout.section_down() manager.c.layout.del_section("Bar") assert manager.c.layout.info()["client_trees"] == {"Foo": [["one"], ["two"], ["three"]]} # add_section manager.c.layout.add_section("Baz") assert manager.c.layout.info()["client_trees"] == { "Foo": [["one"], ["two"], ["three"]], "Baz": [], } manager.c.layout.del_section("Baz") # move_left/right 
manager.c.layout.move_left() # no effect for top-level children assert manager.c.layout.info()["client_trees"] == {"Foo": [["one"], ["two"], ["three"]]} manager.c.layout.move_right() assert manager.c.layout.info()["client_trees"] == {"Foo": [["one"], ["two", ["three"]]]} manager.c.layout.move_right() # no effect assert manager.c.layout.info()["client_trees"] == {"Foo": [["one"], ["two", ["three"]]]} manager.test_window("four") manager.c.layout.move_right() manager.c.layout.up() manager.test_window("five") assert manager.c.layout.info()["client_trees"] == { "Foo": [["one"], ["two", ["three", ["four"]], ["five"]]] } # expand/collapse_branch, and check focus order manager.c.layout.up() manager.c.layout.up() # focus three manager.c.layout.collapse_branch() assert manager.c.layout.info()["client_trees"] == { "Foo": [["one"], ["two", ["three"], ["five"]]] } assert_focus_path(manager, "five", "float1", "float2", "one", "two", "three") manager.c.layout.expand_branch() assert manager.c.layout.info()["client_trees"] == { "Foo": [["one"], ["two", ["three", ["four"]], ["five"]]] } assert_focus_path(manager, "four", "five", "float1", "float2", "one", "two", "three") @treetab_config def test_sort_windows(manager): manager.test_window("one") manager.test_window("two") manager.test_window("101") manager.test_window("102") manager.test_window("103") assert manager.c.layout.info()["client_trees"] == { "Foo": [["one"], ["two"], ["101"], ["102"], ["103"]], "Bar": [], } """ # TODO how to serialize a function object? i.e. `sorter`: def sorter(window): try: if int(window.name) % 2 == 0: return 'Even' else: return 'Odd' except ValueError: return 'Bar' manager.c.layout.sort_windows(sorter) assert manager.c.layout.info()['client_trees'] == { 'Foo': [], 'Bar': [['one'], ['two']], 'Even': [['102']], 'Odd': [['101'], ['103']] } """
qtile/qtile
test/layouts/test_treetab.py
Python
mit
6,060
0.001155
"""Tests for vumi.middleware.tagger.""" import re from vumi.middleware.tagger import TaggingMiddleware from vumi.message import TransportUserMessage from vumi.tests.helpers import VumiTestCase class TestTaggingMiddleware(VumiTestCase): DEFAULT_CONFIG = { 'incoming': { 'addr_pattern': r'^\d+(\d{3})$', 'tagpool_template': r'pool1', 'tagname_template': r'mytag-\1', }, 'outgoing': { 'tagname_pattern': r'mytag-(\d{3})$', 'msg_template': { 'from_addr': r'1234*\1', }, }, } def mk_tagger(self, config=None): dummy_worker = object() if config is None: config = self.DEFAULT_CONFIG self.mw = TaggingMiddleware("dummy_tagger", config, dummy_worker) self.mw.setup_middleware() def mk_msg(self, to_addr, tag=None, from_addr="12345"): msg = TransportUserMessage(to_addr=to_addr, from_addr=from_addr, transport_name="dummy_connector", transport_type="dummy_transport_type") if tag is not None: TaggingMiddleware.add_tag_to_msg(msg, tag) return msg def get_tag(self, to_addr): msg = self.mk_msg(to_addr) msg = self.mw.handle_inbound(msg, "dummy_connector") return TaggingMiddleware.map_msg_to_tag(msg) def get_from_addr(self, to_addr, tag): msg = self.mk_msg(to_addr, tag, from_addr=None) msg = self.mw.handle_outbound(msg, "dummy_connector") return msg['from_addr'] def test_inbound_matching_to_addr(self): self.mk_tagger() self.assertEqual(self.get_tag("123456"), ("pool1", "mytag-456")) self.assertEqual(self.get_tag("1234"), ("pool1", "mytag-234")) def test_inbound_nonmatching_to_addr(self): self.mk_tagger() self.assertEqual(self.get_tag("a1234"), None) def test_inbound_nonmatching_to_addr_leaves_msg_unmodified(self): self.mk_tagger() tag = ("dont", "modify") orig_msg = self.mk_msg("a1234", tag=tag) msg = orig_msg.from_json(orig_msg.to_json()) msg = self.mw.handle_inbound(msg, "dummy_connector") self.assertEqual(msg, orig_msg) def test_inbound_none_to_addr(self): self.mk_tagger() self.assertEqual(self.get_tag(None), None) def test_outbound_matching_tag(self): self.mk_tagger() self.assertEqual(self.get_from_addr("111", ("pool1", "mytag-456")), "1234*456") self.assertEqual(self.get_from_addr("111", ("pool1", "mytag-789")), "1234*789") def test_outbound_nonmatching_tag(self): self.mk_tagger() self.assertEqual(self.get_from_addr("111", ("pool1", "othertag-456")), None) def test_outbound_nonmatching_tag_leaves_msg_unmodified(self): self.mk_tagger() orig_msg = self.mk_msg("a1234", tag=("pool1", "othertag-456")) msg = orig_msg.from_json(orig_msg.to_json()) msg = self.mw.handle_outbound(msg, "dummy_connector") for key in msg.payload.keys(): self.assertEqual(msg[key], orig_msg[key], "Key %r not equal" % key) self.assertEqual(msg, orig_msg) def test_outbound_no_tag(self): self.mk_tagger() self.assertEqual(self.get_from_addr("111", None), None) def test_deepupdate(self): orig = {'a': {'b': "foo"}, 'c': "bar"} TaggingMiddleware._deepupdate(re.match(".*", "foo"), orig, {'a': {'b': "baz"}, 'd': r'\g<0>!', 'e': 1}) self.assertEqual(orig, {'a': {'b': "baz"}, 'c': "bar", 'd': "foo!", 'e': 1}) def test_deepupdate_with_recursion(self): self.mk_tagger() orig = {'a': {'b': "foo"}, 'c': "bar"} new = {'a': {'b': "baz"}} new['a']['d'] = new TaggingMiddleware._deepupdate(re.match(".*", "foo"), orig, new) self.assertEqual(orig, {'a': {'b': "baz"}, 'c': "bar"}) def test_map_msg_to_tag(self): msg = self.mk_msg("123456") self.assertEqual(TaggingMiddleware.map_msg_to_tag(msg), None) msg['helper_metadata']['tag'] = {'tag': ['pool', 'mytag']} self.assertEqual(TaggingMiddleware.map_msg_to_tag(msg), ("pool", "mytag")) def test_add_tag_to_msg(self): msg 
= self.mk_msg("123456") TaggingMiddleware.add_tag_to_msg(msg, ('pool', 'mytag')) self.assertEqual(msg['helper_metadata']['tag'], { 'tag': ['pool', 'mytag'], }) def test_add_tag_to_payload(self): payload = {} TaggingMiddleware.add_tag_to_payload(payload, ('pool', 'mytag')) self.assertEqual(payload, { 'helper_metadata': { 'tag': { 'tag': ['pool', 'mytag'], }, }, })
TouK/vumi
vumi/middleware/tests/test_tagger.py
Python
bsd-3-clause
5,053
0
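The DEFAULT_CONFIG in the tests above pairs a regex capture with a template expansion; the core mapping, shown directly with the stdlib re module:

import re

m = re.match(r'^\d+(\d{3})$', '123456')
print(m.expand(r'mytag-\1'))   # mytag-456

m = re.match(r'mytag-(\d{3})$', 'mytag-456')
print(m.expand(r'1234*\1'))    # 1234*456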
"""Tests for tools and arithmetics for monomials of distributed polynomials. """ from sympy.polys.monomialtools import ( monomials, monomial_count, monomial_lex_cmp, monomial_grlex_cmp, monomial_grevlex_cmp, monomial_cmp, monomial_mul, monomial_div, monomial_gcd, monomial_lcm, monomial_max, monomial_min, ) from sympy.abc import x, y from sympy.utilities.pytest import raises def test_monomials(): assert sorted(monomials([], 0)) == [1] assert sorted(monomials([], 1)) == [1] assert sorted(monomials([], 2)) == [1] assert sorted(monomials([], 3)) == [1] assert sorted(monomials([x], 0)) == [1] assert sorted(monomials([x], 1)) == [1, x] assert sorted(monomials([x], 2)) == [1, x, x**2] assert sorted(monomials([x], 3)) == [1, x, x**2, x**3] assert sorted(monomials([x, y], 0)) == [1] assert sorted(monomials([x, y], 1)) == [1, x, y] assert sorted(monomials([x, y], 2)) == [1, x, y, x**2, y**2, x*y] assert sorted(monomials([x, y], 3)) == [1, x, y, x**2, x**3, y**2, y**3, x*y, x*y**2, y*x**2] def test_monomial_count(): assert monomial_count(2, 2) == 6 assert monomial_count(2, 3) == 10 def test_monomial_lex_cmp(): assert monomial_lex_cmp((1,2,3), (1,2,3)) == 0 assert monomial_lex_cmp((2,2,3), (1,2,3)) == 1 assert monomial_lex_cmp((1,3,3), (1,2,3)) == 1 assert monomial_lex_cmp((1,2,4), (1,2,3)) == 1 assert monomial_lex_cmp((0,2,3), (1,2,3)) == -1 assert monomial_lex_cmp((1,1,3), (1,2,3)) == -1 assert monomial_lex_cmp((1,2,2), (1,2,3)) == -1 def test_monomial_grlex_cmp(): assert monomial_grlex_cmp((1,2,3), (1,2,3)) == 0 assert monomial_grlex_cmp((2,2,3), (1,2,3)) == 1 assert monomial_grlex_cmp((1,3,3), (1,2,3)) == 1 assert monomial_grlex_cmp((1,2,4), (1,2,3)) == 1 assert monomial_grlex_cmp((0,2,3), (1,2,3)) == -1 assert monomial_grlex_cmp((1,1,3), (1,2,3)) == -1 assert monomial_grlex_cmp((1,2,2), (1,2,3)) == -1 assert monomial_grlex_cmp((2,2,3), (1,2,4)) == 1 assert monomial_grlex_cmp((1,3,3), (1,2,4)) == 1 assert monomial_grlex_cmp((0,2,3), (1,2,2)) == -1 assert monomial_grlex_cmp((1,1,3), (1,2,2)) == -1 def test_monomial_grevlex_cmp(): assert monomial_grevlex_cmp((1,2,3), (1,2,3)) == 0 assert monomial_grevlex_cmp((2,2,3), (1,2,3)) == 1 assert monomial_grevlex_cmp((1,3,3), (1,2,3)) == 1 assert monomial_grevlex_cmp((1,2,4), (1,2,3)) == 1 assert monomial_grevlex_cmp((0,2,3), (1,2,3)) == -1 assert monomial_grevlex_cmp((1,1,3), (1,2,3)) == -1 assert monomial_grevlex_cmp((1,2,2), (1,2,3)) == -1 assert monomial_grevlex_cmp((2,2,3), (1,2,4)) == 1 assert monomial_grevlex_cmp((1,3,3), (1,2,4)) == 1 assert monomial_grevlex_cmp((0,2,3), (1,2,2)) == -1 assert monomial_grevlex_cmp((1,1,3), (1,2,2)) == -1 def test_monomial_cmp(): assert monomial_cmp('lex') == monomial_lex_cmp assert monomial_cmp('grlex') == monomial_grlex_cmp assert monomial_cmp('grevlex') == monomial_grevlex_cmp raises(ValueError, "monomial_cmp('unknown')") def test_monomial_mul(): assert monomial_mul((3,4,1), (1,2,0)) == (4,6,1) def test_monomial_div(): assert monomial_div((3,4,1), (1,2,0)) == (2,2,1) def test_monomial_gcd(): assert monomial_gcd((3,4,1), (1,2,0)) == (1,2,0) def test_monomial_lcm(): assert monomial_lcm((3,4,1), (1,2,0)) == (3,4,1) def test_monomial_max(): assert monomial_max((3,4,5), (0,5,1), (6,3,9)) == (6,5,9) def test_monomial_min(): assert monomial_min((3,4,5), (0,5,1), (6,3,9)) == (0,3,1)
tarballs-are-good/sympy
sympy/polys/tests/test_monomialtools.py
Python
bsd-3-clause
3,561
0.048301
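The monomial operations asserted above are plain componentwise arithmetic on exponent tuples (x**a * y**b * z**c corresponds to (a, b, c)); written out directly:

def mono_mul(a, b):
    return tuple(i + j for i, j in zip(a, b))

def mono_div(a, b):
    return tuple(i - j for i, j in zip(a, b))

def mono_lcm(a, b):
    return tuple(max(i, j) for i, j in zip(a, b))

assert mono_mul((3, 4, 1), (1, 2, 0)) == (4, 6, 1)
assert mono_div((3, 4, 1), (1, 2, 0)) == (2, 2, 1)
assert mono_lcm((3, 4, 1), (1, 2, 0)) == (3, 4, 1)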
# Copyright 2011 OpenStack Foundation # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """The Extended Status Admin API extension.""" from nova.api.openstack import extensions from nova.api.openstack import wsgi from nova.api.openstack import xmlutil from nova import compute authorize = extensions.soft_extension_authorizer('compute', 'extended_status') class ExtendedStatusController(wsgi.Controller): def __init__(self, *args, **kwargs): super(ExtendedStatusController, self).__init__(*args, **kwargs) self.compute_api = compute.API() def _extend_server(self, server, instance): for state in ['task_state', 'vm_state', 'power_state']: key = "%s:%s" % (Extended_status.alias, state) server[key] = instance[state] @wsgi.extends def show(self, req, resp_obj, id): context = req.environ['nova.context'] if authorize(context): # Attach our slave template to the response object resp_obj.attach(xml=ExtendedStatusTemplate()) server = resp_obj.obj['server'] db_instance = req.get_db_instance(server['id']) # server['id'] is guaranteed to be in the cache due to # the core API adding it in its 'show' method. self._extend_server(server, db_instance) @wsgi.extends def detail(self, req, resp_obj): context = req.environ['nova.context'] if authorize(context): # Attach our slave template to the response object resp_obj.attach(xml=ExtendedStatusesTemplate()) servers = list(resp_obj.obj['servers']) for server in servers: db_instance = req.get_db_instance(server['id']) # server['id'] is guaranteed to be in the cache due to # the core API adding it in its 'detail' method. self._extend_server(server, db_instance) class Extended_status(extensions.ExtensionDescriptor): """Extended Status support.""" name = "ExtendedStatus" alias = "OS-EXT-STS" namespace = ("http://docs.openstack.org/compute/ext/" "extended_status/api/v1.1") updated = "2011-11-03T00:00:00Z" def get_controller_extensions(self): controller = ExtendedStatusController() extension = extensions.ControllerExtension(self, 'servers', controller) return [extension] def make_server(elem): elem.set('{%s}task_state' % Extended_status.namespace, '%s:task_state' % Extended_status.alias) elem.set('{%s}power_state' % Extended_status.namespace, '%s:power_state' % Extended_status.alias) elem.set('{%s}vm_state' % Extended_status.namespace, '%s:vm_state' % Extended_status.alias) class ExtendedStatusTemplate(xmlutil.TemplateBuilder): def construct(self): root = xmlutil.TemplateElement('server', selector='server') make_server(root) return xmlutil.SlaveTemplate(root, 1, nsmap={ Extended_status.alias: Extended_status.namespace}) class ExtendedStatusesTemplate(xmlutil.TemplateBuilder): def construct(self): root = xmlutil.TemplateElement('servers') elem = xmlutil.SubTemplateElement(root, 'server', selector='servers') make_server(elem) return xmlutil.SlaveTemplate(root, 1, nsmap={ Extended_status.alias: Extended_status.namespace})
ChinaMassClouds/copenstack-server
openstack/src/nova-2014.2/nova/api/openstack/compute/contrib/extended_status.py
Python
gpl-2.0
3,926
0
# # pyconsole.py # # Copyright (C) 2004-2006 by Yevgen Muntyan <muntyan@math.tamu.edu> # Portions of code by Geoffrey French. # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU Lesser General Public version 2.1 as # published by the Free Software Foundation. # # See COPYING.lib file that comes with this distribution for full text # of the license. # # This module 'runs' python interpreter in a TextView widget. # The main class is Console, usage is: # Console(locals=None, banner=None, completer=None, use_rlcompleter=True, start_script='') - # it creates the widget and 'starts' interactive session; see the end # of this file. If start_script is not empty, it pastes it as it was # entered from keyboard. # # Console has "command" signal which is emitted when code is about to # be executed. You may connect to it using console.connect or # console.connect_after to get your callback ran before or after the # code is executed. # # To modify output appearance, set attributes of console.stdout_tag and # console.stderr_tag. # # Console may subclass a type other than gtk.TextView, to allow syntax # highlighting and stuff, # e.g.: # console_type = pyconsole.ConsoleType(moo.edit.TextView) # console = console_type(use_rlcompleter=False, start_script="import moo\nimport gtk\n") # # This widget is not a replacement for real terminal with python running # inside: GtkTextView is not a terminal. # The use case is: you have a python program, you create this widget, # and inspect your program interiors. import gtk import gtk.gdk as gdk import gobject import pango import gtk.keysyms as _keys import code import sys import keyword import re # commonprefix() from posixpath def _commonprefix(m): "Given a list of pathnames, returns the longest common leading component" if not m: return '' prefix = m[0] for item in m: for i in range(len(prefix)): if prefix[:i+1] != item[:i+1]: prefix = prefix[:i] if i == 0: return '' break return prefix class _ReadLine(object): class Output(object): def __init__(self, console, tag_name): object.__init__(self) self.buffer = console.get_buffer() self.tag_name = tag_name def write(self, text): pos = self.buffer.get_iter_at_mark(self.buffer.get_insert()) self.buffer.insert_with_tags_by_name(pos, text, self.tag_name) class History(object): def __init__(self): object.__init__(self) self.items = [''] self.ptr = 0 self.edited = {} def commit(self, text): if text and self.items[-1] != text: self.items.append(text) self.ptr = 0 self.edited = {} def get(self, dir, text): if len(self.items) == 1: return None if text != self.items[self.ptr]: self.edited[self.ptr] = text elif self.edited.has_key(self.ptr): del self.edited[self.ptr] self.ptr = self.ptr + dir if self.ptr >= len(self.items): self.ptr = 0 elif self.ptr < 0: self.ptr = len(self.items) - 1 try: return self.edited[self.ptr] except KeyError: return self.items[self.ptr] def __init__(self, quit_func=None): object.__init__(self) self.quit_func = quit_func self.set_wrap_mode(gtk.WRAP_CHAR) self.modify_font(pango.FontDescription("Monospace")) self.buffer = self.get_buffer() self.buffer.connect("insert-text", self.on_buf_insert) self.buffer.connect("delete-range", self.on_buf_delete) self.buffer.connect("mark-set", self.on_buf_mark_set) self.do_insert = False self.do_delete = False self.stdout_tag = self.buffer.create_tag("stdout", foreground="#006000") self.stderr_tag = self.buffer.create_tag("stderr", foreground="#B00000") self._stdout = _ReadLine.Output(self, "stdout") self._stderr = 
_ReadLine.Output(self, "stderr") self.cursor = self.buffer.create_mark("cursor", self.buffer.get_start_iter(), False) insert = self.buffer.get_insert() self.cursor.set_visible(True) insert.set_visible(False) self.ps = '' self.in_raw_input = False self.run_on_raw_input = None self.tab_pressed = 0 self.history = _ReadLine.History() self.nonword_re = re.compile("[^\w\._]") def freeze_undo(self): try: self.begin_not_undoable_action() except: pass def thaw_undo(self): try: self.end_not_undoable_action() except: pass def raw_input(self, ps=None): if ps: self.ps = ps else: self.ps = '' iter = self.buffer.get_iter_at_mark(self.buffer.get_insert()) if ps: self.freeze_undo() self.buffer.insert(iter, self.ps) self.thaw_undo() self.__move_cursor_to(iter) self.scroll_to_mark(self.cursor, 0.2) self.in_raw_input = True if self.run_on_raw_input: run_now = self.run_on_raw_input self.run_on_raw_input = None self.buffer.insert_at_cursor(run_now + '\n') def on_buf_mark_set(self, buffer, iter, mark): if mark is not buffer.get_insert(): return start = self.__get_start() end = self.__get_end() if iter.compare(self.__get_start()) >= 0 and \ iter.compare(self.__get_end()) <= 0: buffer.move_mark_by_name("cursor", iter) self.scroll_to_mark(self.cursor, 0.2) def __insert(self, iter, text): self.do_insert = True self.buffer.insert(iter, text) self.do_insert = False def on_buf_insert(self, buf, iter, text, len): if not self.in_raw_input or self.do_insert or not len: return buf.stop_emission("insert-text") lines = text.splitlines() need_eol = False for l in lines: if need_eol: self._commit() iter = self.__get_cursor() else: cursor = self.__get_cursor() if iter.compare(self.__get_start()) < 0: iter = cursor elif iter.compare(self.__get_end()) > 0: iter = cursor else: self.__move_cursor_to(iter) need_eol = True self.__insert(iter, l) self.__move_cursor(0) def __delete(self, start, end): self.do_delete = True self.buffer.delete(start, end) self.do_delete = False def on_buf_delete(self, buf, start, end): if not self.in_raw_input or self.do_delete: return buf.stop_emission("delete-range") start.order(end) line_start = self.__get_start() line_end = self.__get_end() if start.compare(line_end) > 0: return if end.compare(line_start) < 0: return self.__move_cursor(0) if start.compare(line_start) < 0: start = line_start if end.compare(line_end) > 0: end = line_end self.__delete(start, end) def do_key_press_event(self, event, parent_type): if not self.in_raw_input: return parent_type.do_key_press_event(self, event) tab_pressed = self.tab_pressed self.tab_pressed = 0 handled = True state = event.state & (gdk.SHIFT_MASK | gdk.CONTROL_MASK | gdk.MOD1_MASK) keyval = event.keyval if not state: if keyval == _keys.Return: self._commit() elif keyval == _keys.Up: self.__history(-1) elif keyval == _keys.Down: self.__history(1) elif keyval == _keys.Left: self.__move_cursor(-1) elif keyval == _keys.Right: self.__move_cursor(1) elif keyval == _keys.Home: self.__move_cursor(-10000) elif keyval == _keys.End: self.__move_cursor(10000) elif keyval == _keys.Tab: cursor = self.__get_cursor() if cursor.starts_line(): handled = False else: cursor.backward_char() if cursor.get_char().isspace(): handled = False else: self.tab_pressed = tab_pressed + 1 self.__complete() else: handled = False elif state == gdk.CONTROL_MASK: if keyval == _keys.u: start = self.__get_start() end = self.__get_cursor() self.__delete(start, end) elif keyval == _keys.d: if self.quit_func: self.quit_func() else: handled = False else: handled = False if not handled: return 
parent_type.do_key_press_event(self, event) else: return True def __history(self, dir): text = self._get_line() new_text = self.history.get(dir, text) if not new_text is None: self.__replace_line(new_text) self.__move_cursor(0) self.scroll_to_mark(self.cursor, 0.2) def __get_cursor(self): return self.buffer.get_iter_at_mark(self.cursor) def __get_start(self): iter = self.__get_cursor() iter.set_line_offset(len(self.ps)) return iter def __get_end(self): iter = self.__get_cursor() if not iter.ends_line(): iter.forward_to_line_end() return iter def __get_text(self, start, end): return self.buffer.get_text(start, end, False) def __move_cursor_to(self, iter): self.buffer.place_cursor(iter) self.buffer.move_mark_by_name("cursor", iter) def __move_cursor(self, howmany): iter = self.__get_cursor() end = self.__get_cursor() if not end.ends_line(): end.forward_to_line_end() line_len = end.get_line_offset() move_to = iter.get_line_offset() + howmany move_to = min(max(move_to, len(self.ps)), line_len) iter.set_line_offset(move_to) self.__move_cursor_to(iter) def __delete_at_cursor(self, howmany): iter = self.__get_cursor() end = self.__get_cursor() if not end.ends_line(): end.forward_to_line_end() line_len = end.get_line_offset() erase_to = iter.get_line_offset() + howmany if erase_to > line_len: erase_to = line_len elif erase_to < len(self.ps): erase_to = len(self.ps) end.set_line_offset(erase_to) self.__delete(iter, end) def __get_width(self): if not (self.flags() & gtk.REALIZED): return 80 layout = pango.Layout(self.get_pango_context()) letters = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz" layout.set_text(letters) pix_width = layout.get_pixel_size()[0] return self.allocation.width * len(letters) / pix_width def __print_completions(self, completions): line_start = self.__get_text(self.__get_start(), self.__get_cursor()) line_end = self.__get_text(self.__get_cursor(), self.__get_end()) iter = self.buffer.get_end_iter() self.__move_cursor_to(iter) self.__insert(iter, "\n") width = max(self.__get_width(), 4) max_width = max([len(s) for s in completions]) n_columns = max(int(width / (max_width + 1)), 1) col_width = int(width / n_columns) total = len(completions) col_length = total / n_columns if total % n_columns: col_length = col_length + 1 col_length = max(col_length, 1) if col_length == 1: n_columns = total col_width = width / total for i in range(col_length): for j in range(n_columns): ind = i + j*col_length if ind < total: if j == n_columns - 1: n_spaces = 0 else: n_spaces = col_width - len(completions[ind]) self.__insert(iter, completions[ind] + " " * n_spaces) self.__insert(iter, "\n") self.__insert(iter, "%s%s%s" % (self.ps, line_start, line_end)) iter.set_line_offset(len(self.ps) + len(line_start)) self.__move_cursor_to(iter) self.scroll_to_mark(self.cursor, 0.2) def __complete(self): text = self.__get_text(self.__get_start(), self.__get_cursor()) start = '' word = text nonwords = self.nonword_re.findall(text) if nonwords: last = text.rfind(nonwords[-1]) + len(nonwords[-1]) start = text[:last] word = text[last:] completions = self.complete(word) if completions: prefix = _commonprefix(completions) if prefix != word: start_iter = self.__get_start() start_iter.forward_chars(len(start)) end_iter = start_iter.copy() end_iter.forward_chars(len(word)) self.__delete(start_iter, end_iter) self.__insert(end_iter, prefix) elif self.tab_pressed > 1: self.freeze_undo() self.__print_completions(completions) self.thaw_undo() self.tab_pressed = 0 def complete(self, text): return None def 
_get_line(self): start = self.__get_start() end = self.__get_end() return self.buffer.get_text(start, end, False) def __replace_line(self, new_text): start = self.__get_start() end = self.__get_end() self.__delete(start, end) self.__insert(end, new_text) def _commit(self): end = self.__get_cursor() if not end.ends_line(): end.forward_to_line_end() text = self._get_line() self.__move_cursor_to(end) self.freeze_undo() self.__insert(end, "\n") self.in_raw_input = False self.history.commit(text) self.do_raw_input(text) self.thaw_undo() def do_raw_input(self, text): pass class _Console(_ReadLine, code.InteractiveInterpreter): def __init__(self, locals=None, banner=None, completer=None, use_rlcompleter=True, start_script=None, quit_func=None): _ReadLine.__init__(self, quit_func) code.InteractiveInterpreter.__init__(self, locals) self.locals["__console__"] = self self.start_script = start_script self.completer = completer self.banner = banner if not self.completer and use_rlcompleter: try: import rlcompleter self.completer = rlcompleter.Completer() except ImportError: pass self.ps1 = ">>> " self.ps2 = "... " self.__start() self.run_on_raw_input = start_script self.raw_input(self.ps1) def __start(self): self.cmd_buffer = "" self.freeze_undo() self.thaw_undo() self.do_delete = True self.buffer.set_text("") self.do_delete = False if self.banner: iter = self.buffer.get_start_iter() self.buffer.insert_with_tags_by_name(iter, self.banner, "stdout") if not iter.starts_line(): self.buffer.insert(iter, "\n") def clear(self, start_script=None): if start_script is None: start_script = self.start_script else: self.start_script = start_script self.__start() self.run_on_raw_input = start_script def do_raw_input(self, text): if self.cmd_buffer: cmd = self.cmd_buffer + "\n" + text else: cmd = text saved_stdout, saved_stderr = sys.stdout, sys.stderr sys.stdout, sys.stderr = self._stdout, self._stderr if self.runsource(cmd): self.cmd_buffer = cmd ps = self.ps2 else: self.cmd_buffer = '' ps = self.ps1 sys.stdout, sys.stderr = saved_stdout, saved_stderr self.raw_input(ps) def do_command(self, code): try: eval(code, self.locals) except SystemExit: raise except: self.showtraceback() def runcode(self, code): if gtk.pygtk_version[1] < 8: self.do_command(code) else: self.emit("command", code) def exec_command(self, command): if self._get_line(): self._commit() self.buffer.insert_at_cursor(command) self._commit() def complete_attr(self, start, end): try: obj = eval(start, self.locals) strings = dir(obj) if end: completions = {} for s in strings: if s.startswith(end): completions[s] = None completions = completions.keys() else: completions = strings completions.sort() return [start + "." 
+ s for s in completions] except: return None def complete(self, text): if self.completer: completions = [] i = 0 try: while 1: s = self.completer.complete(text, i) if s: completions.append(s) i = i + 1 else: completions.sort() return completions except NameError: return None dot = text.rfind(".") if dot >= 0: return self.complete_attr(text[:dot], text[dot+1:]) completions = {} strings = keyword.kwlist if self.locals: strings.extend(self.locals.keys()) try: strings.extend(eval("globals()", self.locals).keys()) except: pass try: exec "import __builtin__" in self.locals strings.extend(eval("dir(__builtin__)", self.locals)) except: pass for s in strings: if s.startswith(text): completions[s] = None completions = completions.keys() completions.sort() return completions def ReadLineType(t=gtk.TextView): class readline(t, _ReadLine): def __init__(self, *args, **kwargs): t.__init__(self) _ReadLine.__init__(self, *args, **kwargs) def do_key_press_event(self, event): return _ReadLine.do_key_press_event(self, event, t) gobject.type_register(readline) return readline def ConsoleType(t=gtk.TextView): class console_type(t, _Console): __gsignals__ = { 'command' : (gobject.SIGNAL_RUN_LAST, gobject.TYPE_NONE, (object,)), 'key-press-event' : 'override' } def __init__(self, *args, **kwargs): if gtk.pygtk_version[1] < 8: gobject.GObject.__init__(self) else: t.__init__(self) _Console.__init__(self, *args, **kwargs) def do_command(self, code): return _Console.do_command(self, code) def do_key_press_event(self, event): return _Console.do_key_press_event(self, event, t) if gtk.pygtk_version[1] < 8: gobject.type_register(console_type) return console_type ReadLine = ReadLineType() Console = ConsoleType() def _make_window(): window = gtk.Window() window.set_title("pyconsole.py") swin = gtk.ScrolledWindow() swin.set_policy(gtk.POLICY_AUTOMATIC, gtk.POLICY_ALWAYS) window.add(swin) console = Console(banner="Hello there!", use_rlcompleter=False, start_script="from gtk import *\n") swin.add(console) window.set_default_size(500, 400) window.show_all() if not gtk.main_level(): window.connect("destroy", gtk.main_quit) gtk.main() return console if __name__ == '__main__': if len(sys.argv) < 2 or sys.argv[1] != '-gimp': _make_window()
jdburton/gimp-osx
src/gimp-2.6.12/plug-ins/pygimp/plug-ins/pyconsole.py
Python
gpl-2.0
20,993
0.00181
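# A minimal embedding sketch for the Console widget above (assumes the same
# PyGTK 2 runtime the module itself requires; the handler below is
# illustrative, not part of pyconsole):
import gtk
import pyconsole

def on_command(console, code):
    # Emitted for each entered command; per the header comment, connect()
    # runs this before the code executes, connect_after() runs it after.
    pass

window = gtk.Window()
swin = gtk.ScrolledWindow()
swin.set_policy(gtk.POLICY_AUTOMATIC, gtk.POLICY_ALWAYS)
console = pyconsole.Console(banner="inspect me\n", use_rlcompleter=True)
console.connect("command", on_command)
swin.add(console)
window.add(swin)
window.set_default_size(500, 400)
window.show_all()
gtk.main()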
# Copyright 2015 Google Inc. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """Updates generated docs from Python doc comments. Updates the documentation files. """ from __future__ import absolute_import from __future__ import division from __future__ import print_function import inspect import os import re _arg_re = re.compile(" *([*]{0,2}[a-zA-Z][a-zA-Z0-9_]*):") _section_re = re.compile("([A-Z][a-zA-Z ]*):$") _always_drop_symbol_re = re.compile("_[_a-zA-Z0-9]") _anchor_re = re.compile(r"^[\w.]+$") _member_mark = "@@" _indiv_dir = "functions_and_classes" _num_subdirs = 10 _subdir_prefix = "shard" class Document(object): """Base class for an automatically generated document.""" def write_markdown_to_file(self, f): """Writes a Markdown-formatted version of this document to file `f`. Args: f: The output file. """ raise NotImplementedError("Document.WriteToFile") class Index(Document): """An automatically generated index for a collection of documents.""" def __init__(self, module_to_name, members, filename_to_library_map, path_prefix): """Creates a new Index. Args: module_to_name: Dictionary mapping modules to short names. members: Dictionary mapping member name to (fullname, member). filename_to_library_map: A list of (filename, Library) pairs. The order corresponds to the order in which the libraries appear in the index. path_prefix: Prefix to add to links in the index. """ self._module_to_name = module_to_name self._members = members self._filename_to_library_map = filename_to_library_map self._path_prefix = path_prefix def write_markdown_to_file(self, f): """Writes this index to file `f`. The output is formatted as an unordered list. Each list element contains the title of the library, followed by a list of symbols in that library hyperlinked to the corresponding anchor in that library. Args: f: The output file. """ print("<!-- This file is machine generated: DO NOT EDIT! -->", file=f) print("", file=f) print("# TensorFlow Python reference documentation", file=f) print("", file=f) fullname_f = lambda name: self._members[name][0] anchor_f = lambda name: _get_anchor(self._module_to_name, fullname_f(name)) for filename, library in self._filename_to_library_map: sorted_names = sorted(library.mentioned, key=lambda x: (str.lower(x), x)) member_names = [n for n in sorted_names if n in self._members] # TODO(wicke): This is a hack that should be removed as soon as the # website code allows it. full_filename = self._path_prefix + filename links = ["[`%s`](%s#%s)" % (name, full_filename, anchor_f(name)) for name in member_names] if links: print("* **[%s](%s)**:" % (library.title, full_filename), file=f) for link in links: print(" * %s" % link, file=f) print("", file=f) def collect_members(module_to_name, exclude=()): """Collect all symbols from a list of modules. Args: module_to_name: Dictionary mapping modules to short names. exclude: Set of fully qualified names to exclude. 
Returns: Dictionary mapping name to (fullname, member) pairs. Raises: RuntimeError: if we can not resolve a name collision. """ members = {} for module, module_name in module_to_name.items(): all_names = getattr(module, "__all__", None) for name, member in inspect.getmembers(module): if ((inspect.isfunction(member) or inspect.isclass(member)) and not _always_drop_symbol_re.match(name) and (all_names is None or name in all_names)): fullname = "%s.%s" % (module_name, name) if fullname in exclude: continue if name in members: other_fullname, other_member = members[name] if member is not other_member: raise RuntimeError("Short name collision between %s and %s" % (fullname, other_fullname)) if len(fullname) == len(other_fullname): raise RuntimeError("Can't decide whether to use %s or %s for %s: " "both full names have length %d" % (fullname, other_fullname, name, len(fullname))) if len(fullname) > len(other_fullname): continue # Use the shorter full name members[name] = fullname, member return members def _get_anchor(module_to_name, fullname): """Turn a full member name into an anchor. Args: module_to_name: Dictionary mapping modules to short names. fullname: Fully qualified name of symbol. Returns: HTML anchor string. The longest module name prefix of fullname is removed to make the anchor. Raises: ValueError: If fullname uses characters invalid in an anchor. """ if not _anchor_re.match(fullname): raise ValueError("'%s' is not a valid anchor" % fullname) anchor = fullname for module_name in module_to_name.values(): if fullname.startswith(module_name + "."): rest = fullname[len(module_name)+1:] # Use this prefix iff it is longer than any found before if len(anchor) > len(rest): anchor = rest return anchor def _stable_hash(s): """A simple string hash that won't change from run to run.""" ret = 0 for c in s: ret = ret * 97 + ord(c) return ret class Library(Document): """An automatically generated document for a set of functions and classes.""" def __init__(self, title, module, module_to_name, members, documented, exclude_symbols=(), prefix=None): """Creates a new Library. Args: title: A human-readable title for the library. module: Module to pull high level docstring from (for table of contents, list of Ops to document, etc.). module_to_name: Dictionary mapping modules to short names. members: Dictionary mapping member name to (fullname, member). documented: Set of documented names to update. exclude_symbols: A list of specific symbols to exclude. prefix: A string to include at the beginning of the page. """ self._title = title self._module = module self._module_to_name = module_to_name self._members = dict(members) # Copy since we mutate it below self._exclude_symbols = frozenset(exclude_symbols) documented.update(exclude_symbols) self._documented = documented self._mentioned = set() self._prefix = prefix or "" @property def title(self): """The human-readable title for this library.""" return self._title @property def mentioned(self): """Set of names mentioned in this library.""" return self._mentioned @property def exclude_symbols(self): """Set of excluded symbols.""" return self._exclude_symbols def _should_include_member(self, name): """Returns True if this member should be included in the document.""" # Always exclude symbols matching _always_drop_symbol_re. if _always_drop_symbol_re.match(name): return False # Finally, exclude any specifically-excluded symbols. 
if name in self._exclude_symbols: return False return True def get_imported_modules(self, module): """Returns the list of modules imported from `module`.""" for name, member in inspect.getmembers(module): if inspect.ismodule(member): yield name, member def get_class_members(self, cls_name, cls): """Returns the list of class members to document in `cls`. This function filters the class member to ONLY return those defined by the class. It drops the inherited ones. Args: cls_name: Qualified name of `cls`. cls: An inspect object of type 'class'. Yields: name, member tuples. """ for name, member in inspect.getmembers(cls): # Only show methods and properties presently. In Python 3, # methods register as isfunction. is_method = inspect.ismethod(member) or inspect.isfunction(member) if not (is_method or isinstance(member, property)): continue if ((is_method and member.__name__ == "__init__") or self._should_include_member(name)): yield name, ("%s.%s" % (cls_name, name), member) def shard_dir(self, name): """Returns the path of the doc subdirectory for member `name`. When generating individual files for each function and class, we shard the files across several directories to avoid hitting the limit for files per directory. This function determines the subdirectory for a member based on a stable hash of its name. Args: name: string. The name of a function or class. Returns: The path to a subdirectory of the api docs directory. """ index = _stable_hash(name) % _num_subdirs return os.path.join(self.functions_and_classes_dir, _subdir_prefix + str(index)) def set_functions_and_classes_dir(self, dirname): """Sets the name of the directory for function and class markdown files. Args: dirname: string. The name of the directory in which to store function and class markdown files. """ self.functions_and_classes_dir = dirname def _generate_signature_for_function(self, func): """Given a function, returns a string representing its args.""" args_list = [] argspec = inspect.getargspec(func) first_arg_with_default = ( len(argspec.args or []) - len(argspec.defaults or [])) for arg in argspec.args[:first_arg_with_default]: if arg == "self": # Python documentation typically skips `self` when printing method # signatures. continue args_list.append(arg) # TODO(mrry): This is a workaround for documenting signature of # functions that have the @contextlib.contextmanager decorator. # We should do something better. if argspec.varargs == "args" and argspec.keywords == "kwds": original_func = func.__closure__[0].cell_contents return self._generate_signature_for_function(original_func) if argspec.defaults: for arg, default in zip( argspec.args[first_arg_with_default:], argspec.defaults): if callable(default): args_list.append("%s=%s" % (arg, default.__name__)) else: args_list.append("%s=%r" % (arg, default)) if argspec.varargs: args_list.append("*" + argspec.varargs) if argspec.keywords: args_list.append("**" + argspec.keywords) return "(" + ", ".join(args_list) + ")" def _remove_docstring_indent(self, docstring): """Remove indenting. We follow Python's convention and remove the minimum indent of the lines after the first, see: https://www.python.org/dev/peps/pep-0257/#handling-docstring-indentation preserving relative indentation. Args: docstring: A docstring. Returns: A list of strings, one per line, with the minimum indent stripped. 
""" docstring = docstring or "" lines = docstring.strip().split("\n") min_indent = len(docstring) for l in lines[1:]: l = l.rstrip() if l: i = 0 while i < len(l) and l[i] == " ": i += 1 if i < min_indent: min_indent = i for i in range(1, len(lines)): l = lines[i].rstrip() if len(l) >= min_indent: l = l[min_indent:] lines[i] = l return lines def _print_formatted_docstring(self, docstring, f): """Formats the given `docstring` as Markdown and prints it to `f`.""" lines = self._remove_docstring_indent(docstring) # Output the lines, identifying "Args" and other section blocks. i = 0 def _at_start_of_section(): """Returns the header if lines[i] is at start of a docstring section.""" l = lines[i] match = _section_re.match(l) if match and i + 1 < len( lines) and lines[i + 1].startswith(" "): return match.group(1) else: return None while i < len(lines): l = lines[i] section_header = _at_start_of_section() if section_header: if i == 0 or lines[i-1]: print("", file=f) # Use at least H4 to keep these out of the TOC. print("##### " + section_header + ":", file=f) print("", file=f) i += 1 outputting_list = False while i < len(lines): l = lines[i] # A new section header terminates the section. if _at_start_of_section(): break match = _arg_re.match(l) if match: if not outputting_list: # We need to start a list. In Markdown, a blank line needs to # precede a list. print("", file=f) outputting_list = True suffix = l[len(match.group()):].lstrip() print("* <b>`" + match.group(1) + "`</b>: " + suffix, file=f) else: # For lines that don't start with _arg_re, continue the list if it # has enough indentation. outputting_list &= l.startswith(" ") print(l, file=f) i += 1 else: print(l, file=f) i += 1 def _print_function(self, f, prefix, fullname, func): """Prints the given function to `f`.""" heading = prefix + " `" + fullname if not isinstance(func, property): heading += self._generate_signature_for_function(func) heading += "` {#%s}" % _get_anchor(self._module_to_name, fullname) print(heading, file=f) print("", file=f) self._print_formatted_docstring(inspect.getdoc(func), f) print("", file=f) def _write_member_markdown_to_file(self, f, prefix, name, member): """Print `member` to `f`.""" if (inspect.isfunction(member) or inspect.ismethod(member) or isinstance(member, property)): print("- - -", file=f) print("", file=f) self._print_function(f, prefix, name, member) print("", file=f) # Write an individual file for each function. if inspect.isfunction(member): indivf = open( os.path.join(self.shard_dir(name), name + ".md"), "w+") self._print_function(indivf, prefix, name, member) elif inspect.isclass(member): print("- - -", file=f) print("", file=f) print("%s `class %s` {#%s}" % (prefix, name, _get_anchor(self._module_to_name, name)), file=f) print("", file=f) self._write_class_markdown_to_file(f, name, member) print("", file=f) # Write an individual file for each class. 
indivf = open( os.path.join(self.shard_dir(name), name + ".md"), "w+") self._write_class_markdown_to_file(indivf, name, member) else: raise RuntimeError("Member %s has unknown type %s" % (name, type(member))) def _write_docstring_markdown_to_file(self, f, prefix, docstring, members, imports): for l in self._remove_docstring_indent(docstring): if l.startswith(_member_mark): name = l[len(_member_mark):].strip(" \t") if name in members: self._documented.add(name) self._mentioned.add(name) self._write_member_markdown_to_file(f, prefix, *members[name]) del members[name] elif name in imports: self._write_module_markdown_to_file(f, imports[name]) else: raise ValueError("%s: unknown member `%s`, markdown=`%s`." % ( self._title, name, l)) else: print(l, file=f) def _write_class_markdown_to_file(self, f, name, cls): """Write the class doc to `f`. Args: f: File to write to. name: name to use. cls: class object. """ # Build the list of class methods to document. methods = dict(self.get_class_members(name, cls)) # Used later to check if any methods were called out in the class # docstring. num_methods = len(methods) try: self._write_docstring_markdown_to_file(f, "####", inspect.getdoc(cls), methods, {}) except ValueError as e: raise ValueError(str(e) + " in class `%s`" % cls.__name__) # If some methods were not described, describe them now if they are # defined by the class itself (not inherited). If NO methods were # described, describe all methods. # # TODO(touts): when all methods have been categorized make it an error # if some methods are not categorized. any_method_called_out = (len(methods) != num_methods) if any_method_called_out: other_methods = {n: m for n, m in methods.items() if n in cls.__dict__} if other_methods: print("\n#### Other Methods", file=f) else: other_methods = methods for name in sorted(other_methods): self._write_member_markdown_to_file(f, "####", *other_methods[name]) def _write_module_markdown_to_file(self, f, module): imports = dict(self.get_imported_modules(module)) self._write_docstring_markdown_to_file(f, "###", inspect.getdoc(module), self._members, imports) def write_markdown_to_file(self, f): """Prints this library to file `f`. Args: f: File to write to. Returns: Dictionary of documented members. """ print("<!-- This file is machine generated: DO NOT EDIT! -->", file=f) print("", file=f) # TODO(touts): Do not insert these. Let the doc writer put them in # the module docstring explicitly. print("#", self._title, file=f) if self._prefix: print(self._prefix, file=f) print("[TOC]", file=f) print("", file=f) if self._module is not None: self._write_module_markdown_to_file(f, self._module) def write_other_members(self, f, catch_all=False): """Writes the leftover members to `f`. Args: f: File to write to. catch_all: If true, document all missing symbols from any module. Otherwise, document missing symbols from just this module. 
""" if catch_all: names = self._members.items() else: names = inspect.getmembers(self._module) all_names = getattr(self._module, "__all__", None) if all_names is not None: names = [(n, m) for n, m in names if n in all_names] leftovers = [] for name, _ in names: if name in self._members and name not in self._documented: leftovers.append(name) if leftovers: print("%s: undocumented members: %d" % (self._title, len(leftovers))) print("\n## Other Functions and Classes", file=f) for name in sorted(leftovers): print(" %s" % name) self._documented.add(name) self._mentioned.add(name) self._write_member_markdown_to_file(f, "###", *self._members[name]) def assert_no_leftovers(self): """Generate an error if there are leftover members.""" leftovers = [] for name in self._members: if name in self._members and name not in self._documented: leftovers.append(name) if leftovers: raise RuntimeError("%s: undocumented members: %s" % (self._title, ", ".join(leftovers))) def write_libraries(output_dir, libraries): """Write a list of libraries to disk. Args: output_dir: Output directory. libraries: List of (filename, library) pairs. """ files = [open(os.path.join(output_dir, k), "w") for k, _ in libraries] # Set the directory in which to save individual class and function md files, # creating it if it doesn't exist. Create subdirectories to avoid hitting # the limit for number of files in a directory. indiv_dir = os.path.join(output_dir, _indiv_dir) if not os.path.exists(indiv_dir): os.makedirs(indiv_dir) for i in range(0, _num_subdirs): subdir = os.path.join(indiv_dir, _subdir_prefix + str(i)) if not os.path.exists(subdir): os.makedirs(subdir) # Document mentioned symbols for all libraries for f, (_, v) in zip(files, libraries): v.set_functions_and_classes_dir(indiv_dir) v.write_markdown_to_file(f) # Document symbols that no library mentioned. We do this after writing # out all libraries so that earlier libraries know what later libraries # documented. for f, (_, v) in zip(files, libraries): v.write_other_members(f) f.close()
sachinpro/sachinpro.github.io
tensorflow/python/framework/docs.py
Python
apache-2.0
21,127
0.008425
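# The sharding scheme above is the least obvious part of docs.py: individual
# .md files are spread over ten shard* subdirectories keyed by a hash that is
# stable across runs. A standalone sketch of the same arithmetic (the
# function name here is illustrative):
def shard_for(name, num_subdirs=10, subdir_prefix="shard"):
    # Mirrors _stable_hash()/shard_dir(): a base-97 polynomial over the
    # character codes, reduced modulo the shard count.
    h = 0
    for c in name:
        h = h * 97 + ord(c)
    return subdir_prefix + str(h % num_subdirs)

print(shard_for("convert_to_tensor"))  # same shard on every run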
#!/bin/false # This file is part of Espruino, a JavaScript interpreter for Microcontrollers # # Copyright (C) 2013 Gordon Williams <gw@pur3.co.uk> # # This Source Code Form is subject to the terms of the Mozilla Public # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. # # ---------------------------------------------------------------------------------------- # This file contains information for a specific board - the available pins, and where LEDs, # Buttons, and other in-built peripherals are. It is used to build documentation as well # as various source and header files for Espruino. # ---------------------------------------------------------------------------------------- import pinutils import json info = { 'name': 'Olimexino STM32 / Leaflabs Maple with STM32F103RET', 'link': ['https://www.olimex.com/Products/Duino/STM32/OLIMEXINO-STM32/', 'http://leaflabs.com/devices/maple/'], 'variables': 3250, 'binary_name': 'espruino_%v_olimexino_stm32_re.bin', 'build' : { 'defines' : [ 'USE_NET', 'USE_GRAPHICS', 'USE_FILESYSTEM', 'USE_TV', 'USE_HASHLIB' ] } } chip = { 'part': 'STM32F103RET6', 'family': 'STM32F1', 'package': 'LQFP64', 'ram': 64, 'flash': 512, 'speed': 72, 'usart': 5, 'spi': 3, 'i2c': 2, 'adc': 3, 'dac': 2 } devices = { 'OSC_RTC': {'pin_1': 'D22', 'pin_2': 'D23'}, 'LED1': {'pin': 'D13'}, 'LED2': {'pin': 'D3'}, 'BTN1': {'pin': 'D38'}, 'USB': {'pin_disc': 'D39', 'pin_dm': 'D40', 'pin_dp': 'D41'}, 'SD': {'pin_cs': 'D25', 'pin_di': 'D34', 'pin_do': 'D33', 'pin_clk': 'D32'}} # left-right, or top-bottom order board = { 'top': ['D14', 'GND', 'D13', 'D12', 'D11', 'D10', 'D9', 'D8', '', 'D7', 'D6', 'D5', 'D4', 'D3', 'D2', 'D1', 'D0'], 'bottom': ['RST', '3.3', '3.3A', 'GNDA', 'GND', 'VIN', '', 'A0', 'A1', 'A2', 'A3', 'A4', 'A5'], 'right': ['D23', 'D25', 'D27', 'D29', 'D31', 'D33', 'D35', 'D37'], 'right2': ['D24', 'D26', 'D28', 'D30', 'D32', 'D34', 'D36', 'GND'], 'left': ['3V3', 'D7', 'D29', 'D12', 'D13'], 'left2': ['GND', 'D8', 'D20', 'D11', 'D4'], '_pinmap': {'A0': 'D15', 'A1': 'D16', 'A2': 'D17', 'A3': 'D18', 'A4': 'D19', 'A5': 'D20'} } board['left'].reverse() board['left2'].reverse() board['right'].reverse() board['right2'].reverse() board["_css"] = """ #board { width: 540px; height: 418px; top: 300px; left: 200px; background-image: url(img/OLIMEXINO_STM32.jpg); } #boardcontainer { height: 850px; } #top { top: -20px; left: 140px; } #bottom { top: 431px; left: 220px; } #left { top: 155px; right: 520px; } #left2 { top:155px; left: 20px; } #right { top: 155px; left: 520px; } #right2 { top: 155px; right: 20px; } """; def get_pins(): pins = pinutils.scan_pin_file([], 'stm32f103xe.csv', 6, 10, 11) # Olimexino/Maple pins have stupid names pinmapping = {'D0': 'PA3', 'D1': 'PA2', 'D2': 'PA0', 'D3': 'PA1', 'D4': 'PB5', 'D5': 'PB6', 'D6': 'PA8', 'D7': 'PA9', 'D8': 'PA10', 'D9': 'PB7', 'D10': 'PA4', 'D11': 'PA7', 'D12': 'PA6', 'D13': 'PA5', 'D14': 'PB8', 'D15':'PC0', # shared with A0-A15 'D16': 'PC1', 'D17': 'PC2', 'D18': 'PC3', 'D19': 'PC4', 'D20': 'PC5', 'D21': 'PC13', 'D22': 'PC14', 'D23': 'PC15', 'D24': 'PB9', 'D25': 'PD2', 'D26': 'PC10', 'D27': 'PB0', 'D28': 'PB1', 'D29': 'PB10', 'D30': 'PB11', 'D31': 'PB12', 'D32': 'PB13', 'D33': 'PB14', 'D34': 'PB15', 'D35': 'PC6', 'D36': 'PC7', 'D37': 'PC8', 'D38':'PC9', # for button 'D39':'PC12', # for USB disc 'D40':'PA11', # for USB dm 'D41':'PA12', # for USB dp } newpins = [] for newname in pinmapping: pin = pinutils.findpin(pins, pinmapping[newname], True) pin['name'] = 'P' + 
newname pin['sortingname'] = newname[0] + newname[1:].rjust(2, '0') newpins.append(pin) # Because 'pinmapping' is NOT stored in order!!! newpins = sorted(newpins, key=lambda pin: pin['sortingname']) return newpins
muet/Espruino
boards/OLIMEXINO_STM32_RE.py
Python
mpl-2.0
4,169
0.015112
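# One detail worth noting in get_pins() above: 'sortingname' zero-pads the
# numeric part of each pin name so that plain string sorting matches numeric
# order ('D4' -> 'D04', which sorts before 'D13'). A self-contained sketch:
def sortingname(newname):
    return newname[0] + newname[1:].rjust(2, '0')

pins = ['D13', 'D4', 'D0', 'D41']
print(sorted(pins, key=sortingname))  # ['D0', 'D4', 'D13', 'D41']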
from urllib.parse import urljoin

import pytest

from adhocracy4.comments import models as comments_models
from adhocracy4.ratings import models as rating_models


@pytest.mark.django_db
def test_delete_comment(comment_factory, rating_factory):
    comment = comment_factory()
    for i in range(5):
        comment_factory(content_object=comment)
    comment_count = comments_models.Comment.objects.all().count()
    rating_factory(content_object=comment)
    rating_count = rating_models.Rating.objects.all().count()
    assert comment_count == 6
    assert rating_count == 1
    comment.delete()
    comment_count = comments_models.Comment.objects.all().count()
    rating_count = rating_models.Rating.objects.all().count()
    assert comment_count == 0
    assert rating_count == 0


@pytest.mark.django_db
def test_save(comment_factory):
    comment_removed = comment_factory(comment='I am not yet removed')
    comment_censored = comment_factory(comment='I am not yet censored')
    assert comment_removed.comment == 'I am not yet removed'
    assert comment_censored.comment == 'I am not yet censored'
    comment_removed.is_removed = True
    comment_removed.save()
    comment_removed.refresh_from_db()
    comment_censored.is_censored = True
    comment_censored.save()
    comment_censored.refresh_from_db()
    assert comment_removed.comment == ''
    assert comment_censored.comment == ''


@pytest.mark.django_db
def test_str(comment_factory):
    short_comment = comment_factory(comment='I am so short')
    long_comment = comment_factory(
        comment='I am a very very very long comment. More than 200 '
        'characters. Yes yes yes. That long! Really that long. How long is '
        'that. Yes yes yes. That long! Really that long. How long is that. '
        'Yes yes yes. That long! Really that long. How long is that.'
    )
    assert str(short_comment) == short_comment.comment
    assert str(long_comment) == "{} ...".format(long_comment.comment[:200])


@pytest.mark.django_db
def test_get_absolute_url(comment, child_comment):
    # comment from factory has Question as content_object, which does not
    # define get_absolute_url, so the url of the module is used
    assert comment.get_absolute_url() == \
        urljoin(comment.module.get_absolute_url(),
                "?comment={}".format(str(comment.id)))
    assert child_comment.get_absolute_url() == \
        urljoin(child_comment.content_object.get_absolute_url(),
                "?comment={}".format(str(child_comment.id)))


@pytest.mark.django_db
def test_notification_content(comment):
    assert comment.notification_content == comment.comment


@pytest.mark.django_db
def test_project(comment):
    assert comment.project == comment.module.project


@pytest.mark.django_db
def test_module(comment, child_comment):
    assert comment.module == comment.content_object.module
    assert child_comment.module == \
        child_comment.content_object.content_object.module
liqd/adhocracy4
tests/comments/test_model.py
Python
agpl-3.0
2,973
0
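# test_str above pins down the display rule for comments without showing the
# model's __str__ itself; in isolation, the behaviour the assertions describe
# is (a sketch inferred from the tests, not the actual model code; the exact
# boundary condition at 200 characters is an assumption):
def comment_display(text, limit=200):
    if len(text) <= limit:
        return text
    return "{} ...".format(text[:limit])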
#!/usr/bin/env python """ A unittest script for the SampleAttribute module. """ import unittest import json from cutlass import SampleAttribute from CutlassTestConfig import CutlassTestConfig from CutlassTestUtil import CutlassTestUtil # pylint: disable=W0703, C1801 class SampleAttributeTest(unittest.TestCase): """ A unit test class for the SampleAttribute module. """ session = None util = None @classmethod def setUpClass(cls): """ Setup for the unittest. """ # Establish the session for each test method cls.session = CutlassTestConfig.get_session() cls.util = CutlassTestUtil() def testImport(self): """ Test the importation of the SampleAttribute module. """ success = False try: from cutlass import SampleAttribute success = True except Exception: pass self.failUnless(success) self.failIf(SampleAttribute is None) def testSessionCreate(self): """ Test the creation of a SampleAttribute via the session. """ success = False attrib = None try: attrib = self.session.create_sample_attr() success = True except Exception: pass self.failUnless(success) self.failIf(attrib is None) def testFecalCal(self): """ Test the fecalcal property. """ attrib = self.session.create_sample_attr() self.util.stringTypeTest(self, attrib, "fecalcal") self.util.stringPropertyTest(self, attrib, "fecalcal") def testSampleDesc(self): """ Test the sample_desc property. """ attrib = self.session.create_sample_attr() self.util.stringTypeTest(self, attrib, "sample_desc") self.util.stringPropertyTest(self, attrib, "sample_desc") def testSampleType(self): """ Test the sample_type property. """ attrib = self.session.create_sample_attr() self.util.stringTypeTest(self, attrib, "sample_type") self.util.stringPropertyTest(self, attrib, "sample_type") def testSubproject(self): """ Test the subproject property. """ attrib = self.session.create_sample_attr() self.util.stringTypeTest(self, attrib, "subproject") self.util.stringPropertyTest(self, attrib, "subproject") def testToJson(self): """ Test the generation of JSON from a SampleAttribute instance. """ attrib = self.session.create_sample_attr() success = False fecalcal = "test fecalcal" attrib.fecalcal = fecalcal attrib_json = None try: attrib_json = attrib.to_json() success = True except Exception: pass self.assertTrue(success, "Able to use 'to_json'.") self.assertTrue(attrib_json is not None, "to_json() returned data.") parse_success = False try: attrib_data = json.loads(attrib_json) parse_success = True except Exception: pass self.assertTrue(parse_success, "to_json() did not throw an exception.") self.assertTrue(attrib_data is not None, "to_json() returned parsable JSON.") self.assertTrue('meta' in attrib_data, "JSON has 'meta' key in it.") self.assertEqual(attrib_data['meta']['fecalcal'], fecalcal, "'fecalcal' in JSON had expected value." ) def testDataInJson(self): """ Test if the correct data is in the generated JSON. 
""" attrib = self.session.create_sample_attr() success = False fecalcal = "test fecalcal" sample_desc = "DNA: mom-vaginal" sample_type = "BC1D" subproject = "earlyPregStudy" attrib.fecalcal = fecalcal attrib.sample_desc = sample_desc attrib.sample_type = sample_type attrib.subproject = subproject attrib_json = None try: attrib_json = attrib.to_json() success = True except Exception: pass self.assertTrue(success, "Able to use 'to_json'.") self.assertTrue(attrib_json is not None, "to_json() returned data.") parse_success = False try: attrib_data = json.loads(attrib_json) parse_success = True except Exception: pass self.assertTrue(parse_success, "to_json() did not throw an exception.") self.assertTrue(attrib_data is not None, "to_json() returned parsable JSON.") self.assertTrue('meta' in attrib_data, "JSON has 'meta' key in it.") self.assertEqual(attrib_data['meta']['fecalcal'], fecalcal, "'fecalcal' in JSON had expected value." ) self.assertEqual(attrib_data['meta']['sample_desc'], sample_desc, "'sample_desc' in JSON had expected value." ) self.assertEqual(attrib_data['meta']['sample_type'], sample_type, "'sample_type' in JSON had expected value." ) self.assertEqual(attrib_data['meta']['subproject'], subproject, "'subproject' in JSON had expected value." ) def testId(self): """ Test the id property. """ attrib = self.session.create_sample_attr() self.assertTrue(attrib.id is None, "New template sample attribute has no ID.") with self.assertRaises(AttributeError): attrib.id = "test" def testVersion(self): """ Test the version property. """ attrib = self.session.create_sample_attr() self.assertTrue(attrib.version is None, "New template sample attribute has no version.") with self.assertRaises(ValueError): attrib.version = "test" def testTags(self): """ Test the tags property. """ attrib = self.session.create_sample_attr() tags = attrib.tags self.assertTrue(type(tags) == list, "SampleAttribute tags() method returns a list.") self.assertEqual(len(tags), 0, "Template sample attribute tags list is empty.") new_tags = ["tagA", "tagB"] attrib.tags = new_tags self.assertEqual(attrib.tags, new_tags, "Can set tags on a sample attribute.") json_str = attrib.to_json() doc = json.loads(json_str) self.assertTrue('tags' in doc['meta'], "JSON representation has 'tags' field in 'meta'.") self.assertEqual(doc['meta']['tags'], new_tags, "JSON representation had correct tags after setter.") def testAddTag(self): """ Test the add_tag() method. """ attrib = self.session.create_sample_attr() attrib.add_tag("test") self.assertEqual(attrib.tags, ["test"], "Can add a tag to a sample attribute.") json_str = attrib.to_json() doc = json.loads(json_str) self.assertEqual(doc['meta']['tags'], ["test"], "JSON representation had correct tags after add_tag().") # Try adding the same tag yet again, shouldn't get a duplicate with self.assertRaises(ValueError): attrib.add_tag("test") json_str = attrib.to_json() doc2 = json.loads(json_str) self.assertEqual(doc2['meta']['tags'], ["test"], "JSON document did not end up with duplicate tags.") def testRequiredFields(self): """ Test the required_fields() static method. """ required = SampleAttribute.required_fields() self.assertEqual(type(required), tuple, "required_fields() returns a tuple.") self.assertTrue(len(required) > 0, "required_field() did not return empty value.") def testLoadSaveDeleteAnnotation(self): """ Extensive test for the load, edit, save and delete functions. 
""" # Attempt to save the sample at all points before and after adding # the required fields attrib = self.session.create_sample_attr() self.assertFalse( attrib.save(), "SampleAttribute not saved successfully, no required fields" ) attrib.fecalcal = "test fecalcal" self.assertFalse( attrib.save(), "SampleAttribute not saved successfully, missing some required fields." ) # SampleAttribute nodes are "associated_with" sample nodes attrib.links = {"associated_with": ["610a4911a5ca67de12cdc1e4b4011876"]} attrib.study = "prediabetes" attrib.add_tag("test") # Add some of the optional properties attrib.sample_desc = "DNA: mom-vaginal" attrib.sample_type = "BC1D" # Make sure annotation does not delete if it does not exist with self.assertRaises(Exception): attrib.delete() self.assertTrue(attrib.save() is True, "SampleAttribute was saved successfully.") # Load the annotation that was just saved from the OSDF instance attrib_loaded = self.session.create_sample_attr() attrib_loaded = attrib_loaded.load(attrib.id) # Check all fields were saved and loaded successfully self.assertEqual(attrib.fecalcal, attrib_loaded.fecalcal, "SampleAttribute fecalcal not saved & loaded successfully") self.assertEqual(attrib.tags[0], attrib_loaded.tags[0], "SampleAttribute tags not saved & loaded successfully") # SampleAttribute is deleted successfully self.assertTrue(attrib.delete(), "SampleAttribute was deleted successfully.") # the node of the initial ID should not load successfully load_test = self.session.create_sample_attr() with self.assertRaises(Exception): load_test = load_test.load(attrib.id) if __name__ == '__main__': unittest.main()
ihmpdcc/cutlass
tests/test_sample_attr.py
Python
mit
10,454
0.001148
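# Condensed from testLoadSaveDeleteAnnotation above, the minimal happy path
# for persisting a SampleAttribute (assumes a configured OSDF session as
# produced by CutlassTestConfig; the linked sample ID is a placeholder):
from CutlassTestConfig import CutlassTestConfig

session = CutlassTestConfig.get_session()
attrib = session.create_sample_attr()
attrib.fecalcal = "test fecalcal"
attrib.links = {"associated_with": ["<sample-node-id>"]}  # placeholder ID
attrib.study = "prediabetes"

if attrib.save():
    loaded = session.create_sample_attr().load(attrib.id)
    assert loaded.fecalcal == attrib.fecalcal
    attrib.delete()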
# -*- coding: utf-8 -*- import datetime from south.db import db from south.v2 import DataMigration from django.db import models from ..utils.pypi import normalize_name class Migration(DataMigration): def forwards(self, orm): "Write your forwards methods here." for package in orm['folivora.Package'].objects.all(): package.normalized_name = normalize_name(package.name) package.save() def backwards(self, orm): "Write your backwards methods here." models = { 'auth.group': { 'Meta': {'object_name': 'Group'}, 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}), 'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}) }, 'auth.permission': { 'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'}, 'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}), 'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '50'}) }, 'auth.user': { 'Meta': {'object_name': 'User'}, 'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}), 'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}), 'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}), 'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}), 'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}), 'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}), 'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'}) }, 'contenttypes.contenttype': { 'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"}, 'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}) }, 'folivora.log': { 'Meta': {'object_name': 'Log'}, 'action': ('django.db.models.fields.CharField', [], {'max_length': '255'}), 'data': ('django_orm.postgresql.hstore.fields.DictionaryField', [], {}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'package': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': "orm['folivora.Package']", 'null': 'True', 
'blank': 'True'}), 'project': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['folivora.Project']"}), 'type': ('django.db.models.fields.CharField', [], {'max_length': '255'}), 'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True'}), 'when': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}) }, 'folivora.package': { 'Meta': {'unique_together': "(('name', 'provider'),)", 'object_name': 'Package'}, 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'initial_sync_done': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}), 'normalized_name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'unique': 'True', 'null': 'True'}), 'provider': ('django.db.models.fields.CharField', [], {'default': "'pypi'", 'max_length': '255'}), 'url': ('django.db.models.fields.URLField', [], {'max_length': '200'}) }, 'folivora.packageversion': { 'Meta': {'unique_together': "(('package', 'version'),)", 'object_name': 'PackageVersion'}, 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'package': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'versions'", 'to': "orm['folivora.Package']"}), 'release_date': ('django.db.models.fields.DateTimeField', [], {}), 'version': ('django.db.models.fields.CharField', [], {'max_length': '255'}) }, 'folivora.project': { 'Meta': {'object_name': 'Project'}, 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'members': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.User']", 'through': "orm['folivora.ProjectMember']", 'symmetrical': 'False'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}), 'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '50'}) }, 'folivora.projectdependency': { 'Meta': {'unique_together': "(('project', 'package'),)", 'object_name': 'ProjectDependency'}, 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'package': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['folivora.Package']"}), 'project': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'dependencies'", 'to': "orm['folivora.Project']"}), 'update': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': "orm['folivora.PackageVersion']", 'null': 'True', 'blank': 'True'}), 'version': ('django.db.models.fields.CharField', [], {'max_length': '255'}) }, 'folivora.projectmember': { 'Meta': {'unique_together': "(('project', 'user'),)", 'object_name': 'ProjectMember'}, 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'jabber': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}), 'mail': ('django.db.models.fields.EmailField', [], {'max_length': '255', 'blank': 'True'}), 'project': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['folivora.Project']"}), 'state': ('django.db.models.fields.IntegerField', [], {}), 'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"}) }, 'folivora.syncstate': { 'Meta': {'object_name': 'SyncState'}, 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'last_sync': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'state': ('django.db.models.fields.CharField', [], {'default': 
"'running'", 'max_length': '255'}), 'type': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}) }, 'folivora.userprofile': { 'Meta': {'object_name': 'UserProfile'}, 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'jabber': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}), 'language': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}), 'timezone': ('django.db.models.fields.CharField', [], {'default': "'UTC'", 'max_length': '255'}), 'user': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['auth.User']", 'unique': 'True'}) } } complete_apps = ['folivora'] symmetrical = True
rocketDuck/folivora
folivora/migrations/0007_normalize_names.py
Python
isc
9,029
0.007864
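# The migration above leans entirely on normalize_name from ..utils.pypi,
# which this record does not show. PyPI name normalization is conventionally
# the PEP 503 rule, so a plausible stand-in looks like this (an assumption;
# the project's own helper may differ in detail):
import re

def normalize_name(name):
    # Collapse runs of '-', '_' and '.' to a single '-' and lowercase:
    # 'Foo_Bar.baz' -> 'foo-bar-baz'
    return re.sub(r"[-_.]+", "-", name).lower()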
__author__ = "Steffen Vogel" __copyright__ = "Copyright 2015, Steffen Vogel" __license__ = "GPLv3" __maintainer__ = "Steffen Vogel" __email__ = "post@steffenvogel.de" """ This file is part of transWhat transWhat is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or any later version. transwhat is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with transWhat. If not, see <http://www.gnu.org/licenses/>. """ from Spectrum2 import protocol_pb2 import logging import time import utils import base64 import deferred from deferred import call class Buddy(): def __init__(self, owner, number, nick, statusMsg, groups, image_hash): self.nick = nick self.owner = owner self.number = number self.groups = groups self.image_hash = image_hash if image_hash is not None else "" self.statusMsg = u"" self.lastseen = 0 self.presence = 0 def update(self, nick, groups, image_hash): self.nick = nick self.groups = groups if image_hash is not None: self.image_hash = image_hash def __str__(self): return "%s (nick=%s)" % (self.number, self.nick) class BuddyList(dict): def __init__(self, owner, backend, user, session): self.owner = owner self.backend = backend self.session = session self.user = user self.logger = logging.getLogger(self.__class__.__name__) self.synced = False def _load(self, buddies): for buddy in buddies: number = buddy.buddyName nick = buddy.alias statusMsg = buddy.statusMessage.decode('utf-8') groups = [g for g in buddy.group] image_hash = buddy.iconHash self[number] = Buddy(self.owner, number, nick, statusMsg, groups, image_hash) self.logger.debug("Update roster") contacts = self.keys() contacts.remove('bot') if self.synced == False: self.session.sendSync(contacts, delta = False, interactive = True) self.synced = True self.logger.debug("Roster add: %s", str(list(contacts))) for number in contacts: buddy = self[number] self.backend.handleBuddyChanged(self.user, number, buddy.nick, buddy.groups, protocol_pb2.STATUS_NONE, iconHash = buddy.image_hash if buddy.image_hash is not None else "") self.session.subscribePresence(number) self.logger.debug("%s is requesting statuses of: %s", self.user, contacts) self.session.requestStatuses(contacts, success = self.onStatus) def onStatus(self, contacts): self.logger.debug("%s received statuses of: %s", self.user, contacts) for number, (status, time) in contacts.iteritems(): buddy = self[number] if status is None: buddy.statusMsg = "" else: buddy.statusMsg = utils.softToUni(status) self.updateSpectrum(buddy) def load(self, buddies): if self.session.loggedIn: self._load(buddies) else: self.session.loginQueue.append(lambda: self._load(buddies)) def update(self, number, nick, groups, image_hash): if number in self: buddy = self[number] buddy.update(nick, groups, image_hash) else: buddy = Buddy(self.owner, number, nick, "", groups, image_hash) self[number] = buddy self.logger.debug("Roster add: %s", buddy) self.session.sendSync([number], delta = True, interactive = True) self.session.subscribePresence(number) self.session.requestStatuses([number], success = self.onStatus) if image_hash == "" or image_hash is None: self.requestVCard(number) self.updateSpectrum(buddy) return buddy def 
updateSpectrum(self, buddy): if buddy.presence == 0: status = protocol_pb2.STATUS_NONE elif buddy.presence == 'unavailable': status = protocol_pb2.STATUS_AWAY else: status = protocol_pb2.STATUS_ONLINE statusmsg = buddy.statusMsg if buddy.lastseen != 0: timestamp = time.localtime(buddy.lastseen) statusmsg += time.strftime("\n Last seen: %a, %d %b %Y %H:%M:%S", timestamp) iconHash = buddy.image_hash if buddy.image_hash is not None else "" self.logger.debug("Updating buddy %s (%s) in %s, image_hash = %s", buddy.nick, buddy.number, buddy.groups, iconHash) self.logger.debug("Status Message: %s", statusmsg) self.backend.handleBuddyChanged(self.user, buddy.number, buddy.nick, buddy.groups, status, statusMessage=statusmsg, iconHash=iconHash) def remove(self, number): try: buddy = self[number] del self[number] self.backend.handleBuddyChanged(self.user, number, "", [], protocol_pb2.STATUS_NONE) self.backend.handleBuddyRemoved(self.user, number) self.session.unsubscribePresence(number) # TODO Sync remove return buddy except KeyError: return None def requestVCard(self, buddy, ID=None): if buddy == self.user or buddy == self.user.split('@')[0]: buddy = self.session.legacyName # Get profile picture self.logger.debug('Requesting profile picture of %s', buddy) response = deferred.Deferred() self.session.requestProfilePicture(buddy, onSuccess = response.run) response = response.arg(0) pictureData = response.pictureData() # Send VCard if ID != None: call(self.logger.debug, 'Sending VCard (%s) with image id %s: %s', ID, response.pictureId(), pictureData.then(base64.b64encode)) call(self.backend.handleVCard, self.user, ID, buddy, "", "", pictureData) # Send image hash if not buddy == self.session.legacyName: try: obuddy = self[buddy] nick = obuddy.nick groups = obuddy.groups except KeyError: nick = "" groups = [] image_hash = pictureData.then(utils.sha1hash) call(self.logger.debug, 'Image hash is %s', image_hash) call(self.update, buddy, nick, groups, image_hash)
NeoBelerophon/transwhat
buddy.py
Python
gpl-3.0
5,903
0.027444
import _plotly_utils.basevalidators class ColorValidator(_plotly_utils.basevalidators.ColorValidator): def __init__( self, plotly_name="color", parent_name="histogram2dcontour.textfont", **kwargs ): super(ColorValidator, self).__init__( plotly_name=plotly_name, parent_name=parent_name, edit_type=kwargs.pop("edit_type", "style"), **kwargs )
plotly/plotly.py
packages/python/plotly/plotly/validators/histogram2dcontour/textfont/_color.py
Python
mit
424
0.002358
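# Generated validators like the one above sit behind the public figure API
# rather than being called directly; the module path encodes the property it
# guards (histogram2dcontour.textfont.color). Typical end-user code that
# exercises it (data values are arbitrary):
import plotly.graph_objects as go

fig = go.Figure(go.Histogram2dContour(
    x=[1, 2, 2, 3, 3, 3],
    y=[1, 1, 2, 2, 3, 3],
    textfont=dict(color="#444"),  # an invalid color here raises ValueError
))
fig.show()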
#!/usr/bin/python # -*- coding: utf-8 -*- from gobspy import main main()
arypbatista/gobspy
gobspy.py
Python
gpl-3.0
75
0
# -*- coding: utf-8 -*-

import time


class Timer(object):
    ''' Simple timer control '''

    def __init__(self, delay):
        self.current_time = 0
        self.set_delay(delay)

    def pause(self, pause):
        if pause >= self.delay:
            self.current_time = time.clock()
            self.next_time = self.current_time + pause

    def set_delay(self, delay):
        assert delay >= 0
        self.delay = delay

        if self.delay == 0:
            self.next_time = self.current_time
        else:
            self.current_time = time.clock()
            self.next_time = self.current_time + self.delay

    def idle(self):
        ''' Return True while the timer is idle, False when the next tick is due '''
        self.current_time = time.clock()

        # if the next tick is still in the future, this is idle time
        if self.next_time > self.current_time:
            return True

        # if more than one delay period has elapsed, resynchronize
        if (self.current_time - self.next_time) > self.delay:
            self.next_time = self.current_time + self.delay
        else:
            self.next_time += self.delay

        return False
pantuza/art-gallery
src/timer.py
Python
gpl-2.0
1,202
0.003328
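# A small driving loop for the Timer above (a sketch; the module name is
# taken from the record's path. The class is built on time.clock(), so it is
# Python 2-era code, and on Unix time.clock() counts CPU time, which is why
# the busy-wait below still advances it):
from timer import Timer

t = Timer(0.5)             # aim for one tick roughly every half second

ticks = 0
while ticks < 4:
    if not t.idle():       # idle() returns False exactly when a tick is due
        ticks += 1
        print("tick %d" % ticks)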
import random def say_thing(): return random.choice([ "dude", "sup", "yo mama" ])
foolwealth/django_multi_deployable_template
lib/lib_a/helper.py
Python
mit
115
0.008696
VERSION = '0.1.3.0'
PROGRAM = 'dsame'
DESCRIPTION = 'dsame is a program to decode EAS/SAME alert messages'
COPYRIGHT = 'Copyright (C) 2016 Joseph W. Metcalf'
TEST_STRING = 'EAS: ZCZC-WXR-RWT-055027-055039-055047-055117-055131-055137-055139-055015-055071+0030-0771800-KMKX/NWS-'

MSG__TEXT={
'EN' : {'MSG1' : '{article} {organization} {preposition} {location} {has} issued a {event} valid until {end}', 'MSG2' : '{conjunction} for the following {division} in {state}: ', 'MSG3' : '{county}{punc} ', 'MSG4' : '', 'AND' : 'and', 'ALL' : 'all', 'HAS' : 'has', 'HAVE' : 'have', 'THE' : 'the', 'A' : 'a', 'IN' : 'in', '' : '', }
}

FIPS_DIVN={
'' : None, '02' : 'boroughs', '22' : 'parishes', '11' : None, '57' : None, '58' : None, '59' : None, '60' : None, '61' : None, '64' : None, '65' : None, '66' : None, '68' : None, '69' : None, '70' : None, '73' : None, '74' : None, '75' : None, '77' : None, '78' : None, '91' : None, '92' : None, '93' : None, '94' : None, '96' : None, '97' : None, '98' : None,
}

US_SAME_AREA={
'LOCATION' : 'US', '01' : 'Alabama', '02' : 'Alaska', '04' : 'Arizona', '05' : 'Arkansas', '06' : 'California', '08' : 'Colorado', '09' : 'Connecticut', '10' : 'Delaware', '11' : 'District of Columbia', '12' : 'Florida', '13' : 'Georgia', '15' : 'Hawaii', '16' : 'Idaho', '17' : 'Illinois', '18' : 'Indiana', '19' : 'Iowa', '20' : 'Kansas', '21' : 'Kentucky', '22' : 'Louisiana', '23' : 'Maine', '24' : 'Maryland', '25' : 'Massachusetts', '26' : 'Michigan', '27' : 'Minnesota', '28' : 'Mississippi', '29' : 'Missouri', '30' : 'Montana', '31' : 'Nebraska', '32' : 'Nevada', '33' : 'New Hampshire', '34' : 'New Jersey', '35' : 'New Mexico', '36' : 'New York', '37' : 'North Carolina', '38' : 'North Dakota', '39' : 'Ohio', '40' : 'Oklahoma', '41' : 'Oregon', '42' : 'Pennsylvania', '44' : 'Rhode Island', '45' : 'South Carolina', '46' : 'South Dakota', '47' : 'Tennessee', '48' : 'Texas', '49' : 'Utah', '50' : 'Vermont', '51' : 'Virginia', '53' : 'Washington', '54' : 'West Virginia', '55' : 'Wisconsin', '56' : 'Wyoming', '57' : 'Pacific Coast from Washington to California', '58' : 'Alaskan Coast', '59' : 'Hawaiian Coast', '60' : 'American Samoa', '61' : 'American Samoa Waters', '64' : 'Federated States of Micronesia', '65' : 'Mariana Islands Waters (including Guam)', '66' : 'Guam', '68' : 'Marshall Islands', '69' : 'Northern Mariana Islands', '70' : 'Palau', '72' : 'Puerto Rico', '73' : 'Atlantic Coast from Maine to Virginia', '74' : 'U.S. Minor Outlying Islands', '75' : 'Atlantic Coast from North Carolina to Florida, and the Coasts of Puerto Rico and Virgin Islands', '77' : 'Gulf of Mexico', '78' : 'Virgin Islands', '91' : 'Lake Superior', '92' : 'Lake Michigan', '93' : 'Lake Huron', '94' : 'Saint Clair River, Detroit River, and Lake Saint Clair', '96' : 'Lake Erie', '97' : 'Niagara River and Lake Ontario', '98' : 'Saint Lawrence River', 'XX' : 'TEST',
}

CA_SAME_AREA={
'LOCATION' : 'CA', '11' : 'Nova Scotia', '12' : 'Nova Scotia', '13' : 'Nova Scotia', '14' : 'Nova Scotia', '15' : 'New Brunswick', '16' : 'New Brunswick', '17' : 'New Brunswick', '18' : 'Prince Edward Island', '21' : 'Newfoundland/Labrador', '22' : 'Newfoundland/Labrador', '23' : 'Newfoundland/Labrador', '24' : 'Newfoundland/Labrador', '25' : 'Newfoundland/Labrador', '26' : 'Newfoundland/Labrador', '27' : 'Newfoundland/Labrador', '28' : 'Newfoundland/Labrador', '29' : 'Newfoundland/Labrador', '31' : 'Quebec', '32' : 'Quebec', '33' : 'Quebec', '34' : 'Quebec', '35' : 'Quebec', '36' : 'Quebec', '37' : 'Quebec', '38' : 'Quebec', '39' : 'Quebec', '41' : 'Ontario', '42' : 'Ontario', '43' : 'Ontario', '44' : 'Ontario', '45' : 'Ontario', '46' : 'Ontario', '47' : 'Ontario', '48' : 'Ontario', '49' : 'Ontario', '51' : 'Manitoba', '52' : 'Manitoba', '53' : 'Manitoba', '54' : 'Manitoba', '55' : 'Manitoba', '56' : 'Manitoba', '57' : 'Manitoba', '58' : 'Manitoba', '59' : 'Manitoba', '61' : 'Saskatchewan', '62' : 'Saskatchewan', '63' : 'Saskatchewan', '64' : 'Saskatchewan', '65' : 'Saskatchewan', '66' : 'Saskatchewan', '67' : 'Saskatchewan', '68' : 'Saskatchewan', '71' : 'Alberta', '72' : 'Alberta', '73' : 'Alberta', '74' : 'Alberta', '75' : 'Alberta', '76' : 'Alberta', '77' : 'Alberta', '78' : 'Alberta', '79' : 'Alberta', '81' : 'British Columbia', '82' : 'British Columbia', '83' : 'British Columbia', '84' : 'British Columbia', '85' : 'British Columbia', '86' : 'British Columbia', '87' : 'British Columbia', '88' : 'British Columbia', '89' : 'British Columbia', '91' : 'Yukon', '92' : 'Yukon', '93' : 'Yukon', '94' : 'Northwest Territories', '95' : 'Northwest Territories', '96' : 'Northwest Territories', '97' : 'Nunavut', '98' : 'Nunavut', '99' : 'Nunavut', 'XX' : 'TEST',
}

US_SAME_CODE={
'01001' : 'Autauga', '01003' : 'Baldwin', '01005' : 'Barbour', '01007' : 'Bibb', '01009' : 'Blount', '01011' : 'Bullock', '01013' : 'Butler', '01015' : 'Calhoun', '01017' : 'Chambers', '01019' : 'Cherokee', '01021' : 'Chilton', '01023' : 'Choctaw', '01025' : 'Clarke', '01027' : 'Clay', '01029' : 'Cleburne', '01031' : 'Coffee', '01033' : 'Colbert', '01035' : 'Conecuh', '01037' : 'Coosa', '01039' : 'Covington', '01041' : 'Crenshaw', '01043' : 'Cullman', '01045' : 'Dale', '01047' : 'Dallas', '01049' : 'Dekalb', '01051' : 'Elmore', '01053' : 'Escambia', '01055' : 'Etowah', '01057' : 'Fayette', '01059' : 'Franklin', '01061' : 'Geneva', '01063' : 'Greene', '01065' : 'Hale', '01067' : 'Henry', '01069' : 'Houston', '01071' : 'Jackson', '01073' : 'Jefferson', '01075' : 'Lamar', '01077' : 'Lauderdale', '01079' : 'Lawrence', '01081' : 'Lee', '01083' : 'Limestone', '01085' : 'Lowndes', '01087' : 'Macon', '01089' : 'Madison', '01091' : 'Marengo', '01093' : 'Marion', '01095' : 'Marshall', '01097' : 'Mobile', '01099' : 'Monroe', '01101' : 'Montgomery', '01103' : 'Morgan', '01105' : 'Perry', '01107' : 'Pickens', '01109' : 'Pike', '01111' : 'Randolph', '01113' : 'Russell', '01115' : 'Saint Clair', '01117' : 'Shelby', '01119' : 'Sumter', '01121' : 'Talladega', '01123' : 'Tallapoosa', '01125' : 'Tuscaloosa',
'01127' : 'Walker', '01129' : 'Washington', '01131' : 'Wilcox', '01133' : 'Winston', '02013' : 'Aleutians East', '02016' : 'Aleutians West', '02020' : 'Anchorage', '02050' : 'Bethel', '02060' : 'Bristol Bay', '02068' : 'Denali', '02070' : 'Dillingham', '02090' : 'Fairbanks North Star', '02100' : 'Haines', '02110' : 'Juneau', '02122' : 'Kenai Peninsula', '02130' : 'Ketchikan Gateway', '02150' : 'Kodiak Island', '02164' : 'Lake and Peninsula', '02170' : 'Matanuska-Susitna', '02180' : 'Nome', '02185' : 'North Slope', '02188' : 'Northwest Arctic', '02201' : 'Prince of Wales-Outer Ketchikan', '02220' : 'Sitka', '02232' : 'Skagway-Hoonah-Angoon', '02240' : 'Southeast Fairbanks', '02261' : 'Valdez-Cordova', '02270' : 'Wade Hampton', '02280' : 'Wrangell-Petersburg', '02282' : 'Yakutat', '02290' : 'Yukon-Koyukuk', '04001' : 'Apache', '04003' : 'Cochise', '04005' : 'Coconino', '04007' : 'Gila', '04009' : 'Graham', '04011' : 'Greenlee', '04012' : 'La Paz', '04013' : 'Maricopa', '04015' : 'Mohave', '04017' : 'Navajo', '04019' : 'Pima', '04021' : 'Pinal', '04023' : 'Santa Cruz', '04025' : 'Yavapai', '04027' : 'Yuma', '05001' : 'Arkansas', '05003' : 'Ashley', '05005' : 'Baxter', '05007' : 'Benton', '05009' : 'Boone', '05011' : 'Bradley', '05013' : 'Calhoun', '05015' : 'Carroll', '05017' : 'Chicot', '05019' : 'Clark', '05021' : 'Clay', '05023' : 'Cleburne', '05025' : 'Cleveland', '05027' : 'Columbia', '05029' : 'Conway', '05031' : 'Craighead', '05033' : 'Crawford', '05035' : 'Crittenden', '05037' : 'Cross', '05039' : 'Dallas', '05041' : 'Desha', '05043' : 'Drew', '05045' : 'Faulkner', '05047' : 'Franklin', '05049' : 'Fulton', '05051' : 'Garland', '05053' : 'Grant', '05055' : 'Greene', '05057' : 'Hempstead', '05059' : 'Hot Spring', '05061' : 'Howard', '05063' : 'Independence', '05065' : 'Izard', '05067' : 'Jackson', '05069' : 'Jefferson', '05071' : 'Johnson', '05073' : 'Lafayette', '05075' : 'Lawrence', '05077' : 'Lee', '05079' : 'Lincoln', '05081' : 'Little River', '05083' : 'Logan', '05085' : 'Lonoke', '05087' : 'Madison', '05089' : 'Marion', '05091' : 'Miller', '05093' : 'Mississippi', '05095' : 'Monroe', '05097' : 'Montgomery', '05099' : 'Nevada', '05101' : 'Newton', '05103' : 'Ouachita', '05105' : 'Perry', '05107' : 'Phillips', '05109' : 'Pike', '05111' : 'Poinsett', '05113' : 'Polk', '05115' : 'Pope', '05117' : 'Prairie', '05119' : 'Pulaski', '05121' : 'Randolph', '05123' : 'Saint Francis', '05125' : 'Saline', '05127' : 'Scott', '05129' : 'Searcy', '05131' : 'Sebastian', '05133' : 'Sevier', '05135' : 'Sharp', '05137' : 'Stone', '05139' : 'Union', '05141' : 'Van Buren', '05143' : 'Washington', '05145' : 'White', '05147' : 'Woodruff', '05149' : 'Yell', '06001' : 'Alameda', '06003' : 'Alpine', '06005' : 'Amador', '06007' : 'Butte', '06009' : 'Calaveras', '06011' : 'Colusa', '06013' : 'Contra Costa', '06015' : 'Del Norte', '06017' : 'El Dorado', '06019' : 'Fresno', '06021' : 'Glenn', '06023' : 'Humboldt', '06025' : 'Imperial', '06027' : 'Inyo', '06029' : 'Kern', '06031' : 'Kings', '06033' : 'Lake', '06035' : 'Lassen', '06037' : 'Los Angeles', '06039' : 'Madera', '06041' : 'Marin', '06043' : 'Mariposa', '06045' : 'Mendocino', '06047' : 'Merced', '06049' : 'Modoc', '06051' : 'Mono', '06053' : 'Monterey', '06055' : 'Napa', '06057' : 'Nevada', '06059' : 'Orange', '06061' : 'Placer', '06063' : 'Plumas', '06065' : 'Riverside', '06067' : 'Sacramento', '06069' : 'San Benito', '06071' : 'San Bernardino', '06073' : 'San Diego', '06075' : 'San Francisco', '06077' : 'San Joaquin', '06079' : 'San Luis Obispo', 
'06081' : 'San Mateo', '06083' : 'Santa Barbara', '06085' : 'Santa Clara', '06087' : 'Santa Cruz', '06089' : 'Shasta', '06091' : 'Sierra', '06093' : 'Siskiyou', '06095' : 'Solano', '06097' : 'Sonoma', '06099' : 'Stanislaus', '06101' : 'Sutter', '06103' : 'Tehama', '06105' : 'Trinity', '06107' : 'Tulare', '06109' : 'Tuolumne', '06111' : 'Ventura', '06113' : 'Yolo', '06115' : 'Yuba', '08001' : 'Adams', '08003' : 'Alamosa', '08005' : 'Arapahoe', '08007' : 'Archuleta', '08009' : 'Baca', '08011' : 'Bent', '08013' : 'Boulder', '08014' : 'Broomfield', '08015' : 'Chaffee', '08017' : 'Cheyenne', '08019' : 'Clear Creek', '08021' : 'Conejos', '08023' : 'Costilla', '08025' : 'Crowley', '08027' : 'Custer', '08029' : 'Delta', '08031' : 'Denver', '08033' : 'Dolores', '08035' : 'Douglas', '08037' : 'Eagle', '08039' : 'Elbert', '08041' : 'El Paso', '08043' : 'Fremont', '08045' : 'Garfield', '08047' : 'Gilpin', '08049' : 'Grand', '08051' : 'Gunnison', '08053' : 'Hinsdale', '08055' : 'Huerfano', '08057' : 'Jackson', '08059' : 'Jefferson', '08061' : 'Kiowa', '08063' : 'Kit Carson', '08065' : 'Lake', '08067' : 'La Plata', '08069' : 'Larimer', '08071' : 'Las Animas', '08073' : 'Lincoln', '08075' : 'Logan', '08077' : 'Mesa', '08079' : 'Mineral', '08081' : 'Moffat', '08083' : 'Montezuma', '08085' : 'Montrose', '08087' : 'Morgan', '08089' : 'Otero', '08091' : 'Ouray', '08093' : 'Park', '08095' : 'Phillips', '08097' : 'Pitkin', '08099' : 'Prowers', '08101' : 'Pueblo', '08103' : 'Rio Blanco', '08105' : 'Rio Grande', '08107' : 'Routt', '08109' : 'Saguache', '08111' : 'San Juan', '08113' : 'San Miguel', '08115' : 'Sedgwick', '08117' : 'Summit', '08119' : 'Teller', '08121' : 'Washington', '08123' : 'Weld', '08125' : 'Yuma', '09001' : 'Fairfield', '09003' : 'Hartford', '09005' : 'Litchfield', '09007' : 'Middlesex', '09009' : 'New Haven', '09011' : 'New London', '09013' : 'Tolland', '09015' : 'Windham', '10001' : 'Kent', '10003' : 'New Castle', '10005' : 'Sussex', '11001' : 'District of Columbia', '12001' : 'Alachua', '12003' : 'Baker', '12005' : 'Bay', '12007' : 'Bradford', '12009' : 'Brevard', '12011' : 'Broward', '12013' : 'Calhoun', '12015' : 'Charlotte', '12017' : 'Citrus', '12019' : 'Clay', '12021' : 'Collier', '12023' : 'Columbia', '12027' : 'Desoto', '12029' : 'Dixie', '12031' : 'Duval', '12033' : 'Escambia', '12035' : 'Flagler', '12037' : 'Franklin', '12039' : 'Gadsden', '12041' : 'Gilchrist', '12043' : 'Glades', '12045' : 'Gulf', '12047' : 'Hamilton', '12049' : 'Hardee', '12051' : 'Hendry', '12053' : 'Hernando', '12055' : 'Highlands', '12057' : 'Hillsborough', '12059' : 'Holmes', '12061' : 'Indian River', '12063' : 'Jackson', '12065' : 'Jefferson', '12067' : 'Lafayette', '12069' : 'Lake', '12071' : 'Lee', '12073' : 'Leon', '12075' : 'Levy', '12077' : 'Liberty', '12079' : 'Madison', '12081' : 'Manatee', '12083' : 'Marion', '12085' : 'Martin', '12086' : 'Miami-Dade', '12087' : 'Monroe', '12089' : 'Nassau', '12091' : 'Okaloosa', '12093' : 'Okeechobee', '12095' : 'Orange', '12097' : 'Osceola', '12099' : 'Palm Beach', '12101' : 'Pasco', '12103' : 'Pinellas', '12105' : 'Polk', '12107' : 'Putnam', '12109' : 'Saint Johns', '12111' : 'Saint Lucie', '12113' : 'Santa Rosa', '12115' : 'Sarasota', '12117' : 'Seminole', '12119' : 'Sumter', '12121' : 'Suwannee', '12123' : 'Taylor', '12125' : 'Union', '12127' : 'Volusia', '12129' : 'Wakulla', '12131' : 'Walton', '12133' : 'Washington', '13001' : 'Appling', '13003' : 'Atkinson', '13005' : 'Bacon', '13007' : 'Baker', '13009' : 'Baldwin', '13011' : 'Banks', '13013' : 'Barrow', 
'13015' : 'Bartow', '13017' : 'Ben Hill', '13019' : 'Berrien', '13021' : 'Bibb', '13023' : 'Bleckley', '13025' : 'Brantley', '13027' : 'Brooks', '13029' : 'Bryan', '13031' : 'Bulloch', '13033' : 'Burke', '13035' : 'Butts', '13037' : 'Calhoun', '13039' : 'Camden', '13043' : 'Candler', '13045' : 'Carroll', '13047' : 'Catoosa', '13049' : 'Charlton', '13051' : 'Chatham', '13053' : 'Chattahoochee', '13055' : 'Chattooga', '13057' : 'Cherokee', '13059' : 'Clarke', '13061' : 'Clay', '13063' : 'Clayton', '13065' : 'Clinch', '13067' : 'Cobb', '13069' : 'Coffee', '13071' : 'Colquitt', '13073' : 'Columbia', '13075' : 'Cook', '13077' : 'Coweta', '13079' : 'Crawford', '13081' : 'Crisp', '13083' : 'Dade', '13085' : 'Dawson', '13087' : 'Decatur', '13089' : 'Dekalb', '13091' : 'Dodge', '13093' : 'Dooly', '13095' : 'Dougherty', '13097' : 'Douglas', '13099' : 'Early', '13101' : 'Echols', '13103' : 'Effingham', '13105' : 'Elbert', '13107' : 'Emanuel', '13109' : 'Evans', '13111' : 'Fannin', '13113' : 'Fayette', '13115' : 'Floyd', '13117' : 'Forsyth', '13119' : 'Franklin', '13121' : 'Fulton', '13123' : 'Gilmer', '13125' : 'Glascock', '13127' : 'Glynn', '13129' : 'Gordon', '13131' : 'Grady', '13133' : 'Greene', '13135' : 'Gwinnett', '13137' : 'Habersham', '13139' : 'Hall', '13141' : 'Hancock', '13143' : 'Haralson', '13145' : 'Harris', '13147' : 'Hart', '13149' : 'Heard', '13151' : 'Henry', '13153' : 'Houston', '13155' : 'Irwin', '13157' : 'Jackson', '13159' : 'Jasper', '13161' : 'Jeff Davis', '13163' : 'Jefferson', '13165' : 'Jenkins', '13167' : 'Johnson', '13169' : 'Jones', '13171' : 'Lamar', '13173' : 'Lanier', '13175' : 'Laurens', '13177' : 'Lee', '13179' : 'Liberty', '13181' : 'Lincoln', '13183' : 'Long', '13185' : 'Lowndes', '13187' : 'Lumpkin', '13189' : 'Mcduffie', '13191' : 'Mcintosh', '13193' : 'Macon', '13195' : 'Madison', '13197' : 'Marion', '13199' : 'Meriwether', '13201' : 'Miller', '13205' : 'Mitchell', '13207' : 'Monroe', '13209' : 'Montgomery', '13211' : 'Morgan', '13213' : 'Murray', '13215' : 'Muscogee', '13217' : 'Newton', '13219' : 'Oconee', '13221' : 'Oglethorpe', '13223' : 'Paulding', '13225' : 'Peach', '13227' : 'Pickens', '13229' : 'Pierce', '13231' : 'Pike', '13233' : 'Polk', '13235' : 'Pulaski', '13237' : 'Putnam', '13239' : 'Quitman', '13241' : 'Rabun', '13243' : 'Randolph', '13245' : 'Richmond', '13247' : 'Rockdale', '13249' : 'Schley', '13251' : 'Screven', '13253' : 'Seminole', '13255' : 'Spalding', '13257' : 'Stephens', '13259' : 'Stewart', '13261' : 'Sumter', '13263' : 'Talbot', '13265' : 'Taliaferro', '13267' : 'Tattnall', '13269' : 'Taylor', '13271' : 'Telfair', '13273' : 'Terrell', '13275' : 'Thomas', '13277' : 'Tift', '13279' : 'Toombs', '13281' : 'Towns', '13283' : 'Treutlen', '13285' : 'Troup', '13287' : 'Turner', '13289' : 'Twiggs', '13291' : 'Union', '13293' : 'Upson', '13295' : 'Walker', '13297' : 'Walton', '13299' : 'Ware', '13301' : 'Warren', '13303' : 'Washington', '13305' : 'Wayne', '13307' : 'Webster', '13309' : 'Wheeler', '13311' : 'White', '13313' : 'Whitfield', '13315' : 'Wilcox', '13317' : 'Wilkes', '13319' : 'Wilkinson', '13321' : 'Worth', '15001' : 'Hawaii', '15003' : 'Honolulu', '15005' : 'Kalawao', '15007' : 'Kauai', '15009' : 'Maui', '16001' : 'Ada', '16003' : 'Adams', '16005' : 'Bannock', '16007' : 'Bear Lake', '16009' : 'Benewah', '16011' : 'Bingham', '16013' : 'Blaine', '16015' : 'Boise', '16017' : 'Bonner', '16019' : 'Bonneville', '16021' : 'Boundary', '16023' : 'Butte', '16025' : 'Camas', '16027' : 'Canyon', '16029' : 'Caribou', '16031' : 'Cassia', 
'16033' : 'Clark', '16035' : 'Clearwater', '16037' : 'Custer', '16039' : 'Elmore', '16041' : 'Franklin', '16043' : 'Fremont', '16045' : 'Gem', '16047' : 'Gooding', '16049' : 'Idaho', '16051' : 'Jefferson', '16053' : 'Jerome', '16055' : 'Kootenai', '16057' : 'Latah', '16059' : 'Lemhi', '16061' : 'Lewis', '16063' : 'Lincoln', '16065' : 'Madison', '16067' : 'Minidoka', '16069' : 'Nez Perce', '16071' : 'Oneida', '16073' : 'Owyhee', '16075' : 'Payette', '16077' : 'Power', '16079' : 'Shoshone', '16081' : 'Teton', '16083' : 'Twin Falls', '16085' : 'Valley', '16087' : 'Washington', '17001' : 'Adams', '17003' : 'Alexander', '17005' : 'Bond', '17007' : 'Boone', '17009' : 'Brown', '17011' : 'Bureau', '17013' : 'Calhoun', '17015' : 'Carroll', '17017' : 'Cass', '17019' : 'Champaign', '17021' : 'Christian', '17023' : 'Clark', '17025' : 'Clay', '17027' : 'Clinton', '17029' : 'Coles', '17031' : 'Cook', '17033' : 'Crawford', '17035' : 'Cumberland', '17037' : 'Dekalb', '17039' : 'De Witt', '17041' : 'Douglas', '17043' : 'Dupage', '17045' : 'Edgar', '17047' : 'Edwards', '17049' : 'Effingham', '17051' : 'Fayette', '17053' : 'Ford', '17055' : 'Franklin', '17057' : 'Fulton', '17059' : 'Gallatin', '17061' : 'Greene', '17063' : 'Grundy', '17065' : 'Hamilton', '17067' : 'Hancock', '17069' : 'Hardin', '17071' : 'Henderson', '17073' : 'Henry', '17075' : 'Iroquois', '17077' : 'Jackson', '17079' : 'Jasper', '17081' : 'Jefferson', '17083' : 'Jersey', '17085' : 'Jo Daviess', '17087' : 'Johnson', '17089' : 'Kane', '17091' : 'Kankakee', '17093' : 'Kendall', '17095' : 'Knox', '17097' : 'Lake', '17099' : 'La Salle', '17101' : 'Lawrence', '17103' : 'Lee', '17105' : 'Livingston', '17107' : 'Logan', '17109' : 'Mcdonough', '17111' : 'Mchenry', '17113' : 'Mclean', '17115' : 'Macon', '17117' : 'Macoupin', '17119' : 'Madison', '17121' : 'Marion', '17123' : 'Marshall', '17125' : 'Mason', '17127' : 'Massac', '17129' : 'Menard', '17131' : 'Mercer', '17133' : 'Monroe', '17135' : 'Montgomery', '17137' : 'Morgan', '17139' : 'Moultrie', '17141' : 'Ogle', '17143' : 'Peoria', '17145' : 'Perry', '17147' : 'Piatt', '17149' : 'Pike', '17151' : 'Pope', '17153' : 'Pulaski', '17155' : 'Putnam', '17157' : 'Randolph', '17159' : 'Richland', '17161' : 'Rock Island', '17163' : 'Saint Clair', '17165' : 'Saline', '17167' : 'Sangamon', '17169' : 'Schuyler', '17171' : 'Scott', '17173' : 'Shelby', '17175' : 'Stark', '17177' : 'Stephenson', '17179' : 'Tazewell', '17181' : 'Union', '17183' : 'Vermilion', '17185' : 'Wabash', '17187' : 'Warren', '17189' : 'Washington', '17191' : 'Wayne', '17193' : 'White', '17195' : 'Whiteside', '17197' : 'Will', '17199' : 'Williamson', '17201' : 'Winnebago', '17203' : 'Woodford', '18001' : 'Adams', '18003' : 'Allen', '18005' : 'Bartholomew', '18007' : 'Benton', '18009' : 'Blackford', '18011' : 'Boone', '18013' : 'Brown', '18015' : 'Carroll', '18017' : 'Cass', '18019' : 'Clark', '18021' : 'Clay', '18023' : 'Clinton', '18025' : 'Crawford', '18027' : 'Daviess', '18029' : 'Dearborn', '18031' : 'Decatur', '18033' : 'De Kalb', '18035' : 'Delaware', '18037' : 'Dubois', '18039' : 'Elkhart', '18041' : 'Fayette', '18043' : 'Floyd', '18045' : 'Fountain', '18047' : 'Franklin', '18049' : 'Fulton', '18051' : 'Gibson', '18053' : 'Grant', '18055' : 'Greene', '18057' : 'Hamilton', '18059' : 'Hancock', '18061' : 'Harrison', '18063' : 'Hendricks', '18065' : 'Henry', '18067' : 'Howard', '18069' : 'Huntington', '18071' : 'Jackson', '18073' : 'Jasper', '18075' : 'Jay', '18077' : 'Jefferson', '18079' : 'Jennings', '18081' : 'Johnson', '18083' : 
'Knox', '18085' : 'Kosciusko', '18087' : 'Lagrange', '18089' : 'Lake', '18091' : 'La Porte', '18093' : 'Lawrence', '18095' : 'Madison', '18097' : 'Marion', '18099' : 'Marshall', '18101' : 'Martin', '18103' : 'Miami', '18105' : 'Monroe', '18107' : 'Montgomery', '18109' : 'Morgan', '18111' : 'Newton', '18113' : 'Noble', '18115' : 'Ohio', '18117' : 'Orange', '18119' : 'Owen', '18121' : 'Parke', '18123' : 'Perry', '18125' : 'Pike', '18127' : 'Porter', '18129' : 'Posey', '18131' : 'Pulaski', '18133' : 'Putnam', '18135' : 'Randolph', '18137' : 'Ripley', '18139' : 'Rush', '18141' : 'Saint Joseph', '18143' : 'Scott', '18145' : 'Shelby', '18147' : 'Spencer', '18149' : 'Starke', '18151' : 'Steuben', '18153' : 'Sullivan', '18155' : 'Switzerland', '18157' : 'Tippecanoe', '18159' : 'Tipton', '18161' : 'Union', '18163' : 'Vanderburgh', '18165' : 'Vermillion', '18167' : 'Vigo', '18169' : 'Wabash', '18171' : 'Warren', '18173' : 'Warrick', '18175' : 'Washington', '18177' : 'Wayne', '18179' : 'Wells', '18181' : 'White', '18183' : 'Whitley', '19001' : 'Adair', '19003' : 'Adams', '19005' : 'Allamakee', '19007' : 'Appanoose', '19009' : 'Audubon', '19011' : 'Benton', '19013' : 'Black Hawk', '19015' : 'Boone', '19017' : 'Bremer', '19019' : 'Buchanan', '19021' : 'Buena Vista', '19023' : 'Butler', '19025' : 'Calhoun', '19027' : 'Carroll', '19029' : 'Cass', '19031' : 'Cedar', '19033' : 'Cerro Gordo', '19035' : 'Cherokee', '19037' : 'Chickasaw', '19039' : 'Clarke', '19041' : 'Clay', '19043' : 'Clayton', '19045' : 'Clinton', '19047' : 'Crawford', '19049' : 'Dallas', '19051' : 'Davis', '19053' : 'Decatur', '19055' : 'Delaware', '19057' : 'Des Moines', '19059' : 'Dickinson', '19061' : 'Dubuque', '19063' : 'Emmet', '19065' : 'Fayette', '19067' : 'Floyd', '19069' : 'Franklin', '19071' : 'Fremont', '19073' : 'Greene', '19075' : 'Grundy', '19077' : 'Guthrie', '19079' : 'Hamilton', '19081' : 'Hancock', '19083' : 'Hardin', '19085' : 'Harrison', '19087' : 'Henry', '19089' : 'Howard', '19091' : 'Humboldt', '19093' : 'Ida', '19095' : 'Iowa', '19097' : 'Jackson', '19099' : 'Jasper', '19101' : 'Jefferson', '19103' : 'Johnson', '19105' : 'Jones', '19107' : 'Keokuk', '19109' : 'Kossuth', '19111' : 'Lee', '19113' : 'Linn', '19115' : 'Louisa', '19117' : 'Lucas', '19119' : 'Lyon', '19121' : 'Madison', '19123' : 'Mahaska', '19125' : 'Marion', '19127' : 'Marshall', '19129' : 'Mills', '19131' : 'Mitchell', '19133' : 'Monona', '19135' : 'Monroe', '19137' : 'Montgomery', '19139' : 'Muscatine', '19141' : 'O\'Brien', '19143' : 'Osceola', '19145' : 'Page', '19147' : 'Palo Alto', '19149' : 'Plymouth', '19151' : 'Pocahontas', '19153' : 'Polk', '19155' : 'Pottawattamie', '19157' : 'Poweshiek', '19159' : 'Ringgold', '19161' : 'Sac', '19163' : 'Scott', '19165' : 'Shelby', '19167' : 'Sioux', '19169' : 'Story', '19171' : 'Tama', '19173' : 'Taylor', '19175' : 'Union', '19177' : 'Van Buren', '19179' : 'Wapello', '19181' : 'Warren', '19183' : 'Washington', '19185' : 'Wayne', '19187' : 'Webster', '19189' : 'Winnebago', '19191' : 'Winneshiek', '19193' : 'Woodbury', '19195' : 'Worth', '19197' : 'Wright', '20001' : 'Allen', '20003' : 'Anderson', '20005' : 'Atchison', '20007' : 'Barber', '20009' : 'Barton', '20011' : 'Bourbon', '20013' : 'Brown', '20015' : 'Butler', '20017' : 'Chase', '20019' : 'Chautauqua', '20021' : 'Cherokee', '20023' : 'Cheyenne', '20025' : 'Clark', '20027' : 'Clay', '20029' : 'Cloud', '20031' : 'Coffey', '20033' : 'Comanche', '20035' : 'Cowley', '20037' : 'Crawford', '20039' : 'Decatur', '20041' : 'Dickinson', '20043' : 'Doniphan', 
'20045' : 'Douglas', '20047' : 'Edwards', '20049' : 'Elk', '20051' : 'Ellis', '20053' : 'Ellsworth', '20055' : 'Finney', '20057' : 'Ford', '20059' : 'Franklin', '20061' : 'Geary', '20063' : 'Gove', '20065' : 'Graham', '20067' : 'Grant', '20069' : 'Gray', '20071' : 'Greeley', '20073' : 'Greenwood', '20075' : 'Hamilton', '20077' : 'Harper', '20079' : 'Harvey', '20081' : 'Haskell', '20083' : 'Hodgeman', '20085' : 'Jackson', '20087' : 'Jefferson', '20089' : 'Jewell', '20091' : 'Johnson', '20093' : 'Kearny', '20095' : 'Kingman', '20097' : 'Kiowa', '20099' : 'Labette', '20101' : 'Lane', '20103' : 'Leavenworth', '20105' : 'Lincoln', '20107' : 'Linn', '20109' : 'Logan', '20111' : 'Lyon', '20113' : 'Mcpherson', '20115' : 'Marion', '20117' : 'Marshall', '20119' : 'Meade', '20121' : 'Miami', '20123' : 'Mitchell', '20125' : 'Montgomery', '20127' : 'Morris', '20129' : 'Morton', '20131' : 'Nemaha', '20133' : 'Neosho', '20135' : 'Ness', '20137' : 'Norton', '20139' : 'Osage', '20141' : 'Osborne', '20143' : 'Ottawa', '20145' : 'Pawnee', '20147' : 'Phillips', '20149' : 'Pottawatomie', '20151' : 'Pratt', '20153' : 'Rawlins', '20155' : 'Reno', '20157' : 'Republic', '20159' : 'Rice', '20161' : 'Riley', '20163' : 'Rooks', '20165' : 'Rush', '20167' : 'Russell', '20169' : 'Saline', '20171' : 'Scott', '20173' : 'Sedgwick', '20175' : 'Seward', '20177' : 'Shawnee', '20179' : 'Sheridan', '20181' : 'Sherman', '20183' : 'Smith', '20185' : 'Stafford', '20187' : 'Stanton', '20189' : 'Stevens', '20191' : 'Sumner', '20193' : 'Thomas', '20195' : 'Trego', '20197' : 'Wabaunsee', '20199' : 'Wallace', '20201' : 'Washington', '20203' : 'Wichita', '20205' : 'Wilson', '20207' : 'Woodson', '20209' : 'Wyandotte', '21001' : 'Adair', '21003' : 'Allen', '21005' : 'Anderson', '21007' : 'Ballard', '21009' : 'Barren', '21011' : 'Bath', '21013' : 'Bell', '21015' : 'Boone', '21017' : 'Bourbon', '21019' : 'Boyd', '21021' : 'Boyle', '21023' : 'Bracken', '21025' : 'Breathitt', '21027' : 'Breckinridge', '21029' : 'Bullitt', '21031' : 'Butler', '21033' : 'Caldwell', '21035' : 'Calloway', '21037' : 'Campbell', '21039' : 'Carlisle', '21041' : 'Carroll', '21043' : 'Carter', '21045' : 'Casey', '21047' : 'Christian', '21049' : 'Clark', '21051' : 'Clay', '21053' : 'Clinton', '21055' : 'Crittenden', '21057' : 'Cumberland', '21059' : 'Daviess', '21061' : 'Edmonson', '21063' : 'Elliott', '21065' : 'Estill', '21067' : 'Fayette', '21069' : 'Fleming', '21071' : 'Floyd', '21073' : 'Franklin', '21075' : 'Fulton', '21077' : 'Gallatin', '21079' : 'Garrard', '21081' : 'Grant', '21083' : 'Graves', '21085' : 'Grayson', '21087' : 'Green', '21089' : 'Greenup', '21091' : 'Hancock', '21093' : 'Hardin', '21095' : 'Harlan', '21097' : 'Harrison', '21099' : 'Hart', '21101' : 'Henderson', '21103' : 'Henry', '21105' : 'Hickman', '21107' : 'Hopkins', '21109' : 'Jackson', '21111' : 'Jefferson', '21113' : 'Jessamine', '21115' : 'Johnson', '21117' : 'Kenton', '21119' : 'Knott', '21121' : 'Knox', '21123' : 'Larue', '21125' : 'Laurel', '21127' : 'Lawrence', '21129' : 'Lee', '21131' : 'Leslie', '21133' : 'Letcher', '21135' : 'Lewis', '21137' : 'Lincoln', '21139' : 'Livingston', '21141' : 'Logan', '21143' : 'Lyon', '21145' : 'Mccracken', '21147' : 'Mccreary', '21149' : 'Mclean', '21151' : 'Madison', '21153' : 'Magoffin', '21155' : 'Marion', '21157' : 'Marshall', '21159' : 'Martin', '21161' : 'Mason', '21163' : 'Meade', '21165' : 'Menifee', '21167' : 'Mercer', '21169' : 'Metcalfe', '21171' : 'Monroe', '21173' : 'Montgomery', '21175' : 'Morgan', '21177' : 'Muhlenberg', '21179' : 
'Nelson', '21181' : 'Nicholas', '21183' : 'Ohio', '21185' : 'Oldham', '21187' : 'Owen', '21189' : 'Owsley', '21191' : 'Pendleton', '21193' : 'Perry', '21195' : 'Pike', '21197' : 'Powell', '21199' : 'Pulaski', '21201' : 'Robertson', '21203' : 'Rockcastle', '21205' : 'Rowan', '21207' : 'Russell', '21209' : 'Scott', '21211' : 'Shelby', '21213' : 'Simpson', '21215' : 'Spencer', '21217' : 'Taylor', '21219' : 'Todd', '21221' : 'Trigg', '21223' : 'Trimble', '21225' : 'Union', '21227' : 'Warren', '21229' : 'Washington', '21231' : 'Wayne', '21233' : 'Webster', '21235' : 'Whitley', '21237' : 'Wolfe', '21239' : 'Woodford', '22001' : 'Acadia', '22003' : 'Allen', '22005' : 'Ascension', '22007' : 'Assumption', '22009' : 'Avoyelles', '22011' : 'Beauregard', '22013' : 'Bienville', '22015' : 'Bossier', '22017' : 'Caddo', '22019' : 'Calcasieu', '22021' : 'Caldwell', '22023' : 'Cameron', '22025' : 'Catahoula', '22027' : 'Claiborne', '22029' : 'Concordia', '22031' : 'De Soto', '22033' : 'East Baton Rouge', '22035' : 'East Carroll', '22037' : 'East Feliciana', '22039' : 'Evangeline', '22041' : 'Franklin', '22043' : 'Grant', '22045' : 'Iberia', '22047' : 'Iberville', '22049' : 'Jackson', '22051' : 'Jefferson', '22053' : 'Jefferson Davis', '22055' : 'Lafayette', '22057' : 'Lafourche', '22059' : 'La Salle', '22061' : 'Lincoln', '22063' : 'Livingston', '22065' : 'Madison', '22067' : 'Morehouse', '22069' : 'Natchitoches', '22071' : 'Orleans', '22073' : 'Ouachita', '22075' : 'Plaquemines', '22077' : 'Pointe Coupee', '22079' : 'Rapides', '22081' : 'Red River', '22083' : 'Richland', '22085' : 'Sabine', '22087' : 'Saint Bernard', '22089' : 'Saint Charles', '22091' : 'Saint Helena', '22093' : 'Saint James', '22095' : 'Saint John the Baptist', '22097' : 'Saint Landry', '22099' : 'Saint Martin', '22101' : 'Saint Mary', '22103' : 'Saint Tammany', '22105' : 'Tangipahoa', '22107' : 'Tensas', '22109' : 'Terrebonne', '22111' : 'Union', '22113' : 'Vermilion', '22115' : 'Vernon', '22117' : 'Washington', '22119' : 'Webster', '22121' : 'West Baton Rouge', '22123' : 'West Carroll', '22125' : 'West Feliciana', '22127' : 'Winn', '23001' : 'Androscoggin', '23003' : 'Aroostook', '23005' : 'Cumberland', '23007' : 'Franklin', '23009' : 'Hancock', '23011' : 'Kennebec', '23013' : 'Knox', '23015' : 'Lincoln', '23017' : 'Oxford', '23019' : 'Penobscot', '23021' : 'Piscataquis', '23023' : 'Sagadahoc', '23025' : 'Somerset', '23027' : 'Waldo', '23029' : 'Washington', '23031' : 'York', '24001' : 'Allegany', '24003' : 'Anne Arundel', '24005' : 'Baltimore', '24009' : 'Calvert', '24011' : 'Caroline', '24013' : 'Carroll', '24015' : 'Cecil', '24017' : 'Charles', '24019' : 'Dorchester', '24021' : 'Frederick', '24023' : 'Garrett', '24025' : 'Harford', '24027' : 'Howard', '24029' : 'Kent', '24031' : 'Montgomery', '24033' : 'Prince George\'s', '24035' : 'Queen Anne\'s', '24037' : 'Saint Mary\'s', '24039' : 'Somerset', '24041' : 'Talbot', '24043' : 'Washington', '24045' : 'Wicomico', '24047' : 'Worcester', '24510' : 'City of Baltimore', '25001' : 'Barnstable', '25003' : 'Berkshire', '25005' : 'Bristol', '25007' : 'Dukes', '25009' : 'Essex', '25011' : 'Franklin', '25013' : 'Hampden', '25015' : 'Hampshire', '25017' : 'Middlesex', '25019' : 'Nantucket', '25021' : 'Norfolk', '25023' : 'Plymouth', '25025' : 'Suffolk', '25027' : 'Worcester', '26001' : 'Alcona', '26003' : 'Alger', '26005' : 'Allegan', '26007' : 'Alpena', '26009' : 'Antrim', '26011' : 'Arenac', '26013' : 'Baraga', '26015' : 'Barry', '26017' : 'Bay', '26019' : 'Benzie', '26021' : 'Berrien', '26023' 
: 'Branch', '26025' : 'Calhoun', '26027' : 'Cass', '26029' : 'Charlevoix', '26031' : 'Cheboygan', '26033' : 'Chippewa', '26035' : 'Clare', '26037' : 'Clinton', '26039' : 'Crawford', '26041' : 'Delta', '26043' : 'Dickinson', '26045' : 'Eaton', '26047' : 'Emmet', '26049' : 'Genesee', '26051' : 'Gladwin', '26053' : 'Gogebic', '26055' : 'Grand Traverse', '26057' : 'Gratiot', '26059' : 'Hillsdale', '26061' : 'Houghton', '26063' : 'Huron', '26065' : 'Ingham', '26067' : 'Ionia', '26069' : 'Iosco', '26071' : 'Iron', '26073' : 'Isabella', '26075' : 'Jackson', '26077' : 'Kalamazoo', '26079' : 'Kalkaska', '26081' : 'Kent', '26083' : 'Keweenaw', '26085' : 'Lake', '26087' : 'Lapeer', '26089' : 'Leelanau', '26091' : 'Lenawee', '26093' : 'Livingston', '26095' : 'Luce', '26097' : 'Mackinac', '26099' : 'Macomb', '26101' : 'Manistee', '26103' : 'Marquette', '26105' : 'Mason', '26107' : 'Mecosta', '26109' : 'Menominee', '26111' : 'Midland', '26113' : 'Missaukee', '26115' : 'Monroe', '26117' : 'Montcalm', '26119' : 'Montmorency', '26121' : 'Muskegon', '26123' : 'Newaygo', '26125' : 'Oakland', '26127' : 'Oceana', '26129' : 'Ogemaw', '26131' : 'Ontonagon', '26133' : 'Osceola', '26135' : 'Oscoda', '26137' : 'Otsego', '26139' : 'Ottawa', '26141' : 'Presque Isle', '26143' : 'Roscommon', '26145' : 'Saginaw', '26147' : 'Saint Clair', '26149' : 'Saint Joseph', '26151' : 'Sanilac', '26153' : 'Schoolcraft', '26155' : 'Shiawassee', '26157' : 'Tuscola', '26159' : 'Van Buren', '26161' : 'Washtenaw', '26163' : 'Wayne', '26165' : 'Wexford', '27001' : 'Aitkin', '27003' : 'Anoka', '27005' : 'Becker', '27007' : 'Beltrami', '27009' : 'Benton', '27011' : 'Big Stone', '27013' : 'Blue Earth', '27015' : 'Brown', '27017' : 'Carlton', '27019' : 'Carver', '27021' : 'Cass', '27023' : 'Chippewa', '27025' : 'Chisago', '27027' : 'Clay', '27029' : 'Clearwater', '27031' : 'Cook', '27033' : 'Cottonwood', '27035' : 'Crow Wing', '27037' : 'Dakota', '27039' : 'Dodge', '27041' : 'Douglas', '27043' : 'Faribault', '27045' : 'Fillmore', '27047' : 'Freeborn', '27049' : 'Goodhue', '27051' : 'Grant', '27053' : 'Hennepin', '27055' : 'Houston', '27057' : 'Hubbard', '27059' : 'Isanti', '27061' : 'Itasca', '27063' : 'Jackson', '27065' : 'Kanabec', '27067' : 'Kandiyohi', '27069' : 'Kittson', '27071' : 'Koochiching', '27073' : 'Lac Qui Parle', '27075' : 'Lake', '27077' : 'Lake of the Woods', '27079' : 'Le Sueur', '27081' : 'Lincoln', '27083' : 'Lyon', '27085' : 'Mcleod', '27087' : 'Mahnomen', '27089' : 'Marshall', '27091' : 'Martin', '27093' : 'Meeker', '27095' : 'Mille Lacs', '27097' : 'Morrison', '27099' : 'Mower', '27101' : 'Murray', '27103' : 'Nicollet', '27105' : 'Nobles', '27107' : 'Norman', '27109' : 'Olmsted', '27111' : 'Otter Tail', '27113' : 'Pennington', '27115' : 'Pine', '27117' : 'Pipestone', '27119' : 'Polk', '27121' : 'Pope', '27123' : 'Ramsey', '27125' : 'Red Lake', '27127' : 'Redwood', '27129' : 'Renville', '27131' : 'Rice', '27133' : 'Rock', '27135' : 'Roseau', '27137' : 'Saint Louis', '27139' : 'Scott', '27141' : 'Sherburne', '27143' : 'Sibley', '27145' : 'Stearns', '27147' : 'Steele', '27149' : 'Stevens', '27151' : 'Swift', '27153' : 'Todd', '27155' : 'Traverse', '27157' : 'Wabasha', '27159' : 'Wadena', '27161' : 'Waseca', '27163' : 'Washington', '27165' : 'Watonwan', '27167' : 'Wilkin', '27169' : 'Winona', '27171' : 'Wright', '27173' : 'Yellow Medicine', '28001' : 'Adams', '28003' : 'Alcorn', '28005' : 'Amite', '28007' : 'Attala', '28009' : 'Benton', '28011' : 'Bolivar', '28013' : 'Calhoun', '28015' : 'Carroll', '28017' : 'Chickasaw', 
'28019' : 'Choctaw', '28021' : 'Claiborne', '28023' : 'Clarke', '28025' : 'Clay', '28027' : 'Coahoma', '28029' : 'Copiah', '28031' : 'Covington', '28033' : 'Desoto', '28035' : 'Forrest', '28037' : 'Franklin', '28039' : 'George', '28041' : 'Greene', '28043' : 'Grenada', '28045' : 'Hancock', '28047' : 'Harrison', '28049' : 'Hinds', '28051' : 'Holmes', '28053' : 'Humphreys', '28055' : 'Issaquena', '28057' : 'Itawamba', '28059' : 'Jackson', '28061' : 'Jasper', '28063' : 'Jefferson', '28065' : 'Jefferson Davis', '28067' : 'Jones', '28069' : 'Kemper', '28071' : 'Lafayette', '28073' : 'Lamar', '28075' : 'Lauderdale', '28077' : 'Lawrence', '28079' : 'Leake', '28081' : 'Lee', '28083' : 'Leflore', '28085' : 'Lincoln', '28087' : 'Lowndes', '28089' : 'Madison', '28091' : 'Marion', '28093' : 'Marshall', '28095' : 'Monroe', '28097' : 'Montgomery', '28099' : 'Neshoba', '28101' : 'Newton', '28103' : 'Noxubee', '28105' : 'Oktibbeha', '28107' : 'Panola', '28109' : 'Pearl River', '28111' : 'Perry', '28113' : 'Pike', '28115' : 'Pontotoc', '28117' : 'Prentiss', '28119' : 'Quitman', '28121' : 'Rankin', '28123' : 'Scott', '28125' : 'Sharkey', '28127' : 'Simpson', '28129' : 'Smith', '28131' : 'Stone', '28133' : 'Sunflower', '28135' : 'Tallahatchie', '28137' : 'Tate', '28139' : 'Tippah', '28141' : 'Tishomingo', '28143' : 'Tunica', '28145' : 'Union', '28147' : 'Walthall', '28149' : 'Warren', '28151' : 'Washington', '28153' : 'Wayne', '28155' : 'Webster', '28157' : 'Wilkinson', '28159' : 'Winston', '28161' : 'Yalobusha', '28163' : 'Yazoo', '29001' : 'Adair', '29003' : 'Andrew', '29005' : 'Atchison', '29007' : 'Audrain', '29009' : 'Barry', '29011' : 'Barton', '29013' : 'Bates', '29015' : 'Benton', '29017' : 'Bollinger', '29019' : 'Boone', '29021' : 'Buchanan', '29023' : 'Butler', '29025' : 'Caldwell', '29027' : 'Callaway', '29029' : 'Camden', '29031' : 'Cape Girardeau', '29033' : 'Carroll', '29035' : 'Carter', '29037' : 'Cass', '29039' : 'Cedar', '29041' : 'Chariton', '29043' : 'Christian', '29045' : 'Clark', '29047' : 'Clay', '29049' : 'Clinton', '29051' : 'Cole', '29053' : 'Cooper', '29055' : 'Crawford', '29057' : 'Dade', '29059' : 'Dallas', '29061' : 'Daviess', '29063' : 'Dekalb', '29065' : 'Dent', '29067' : 'Douglas', '29069' : 'Dunklin', '29071' : 'Franklin', '29073' : 'Gasconade', '29075' : 'Gentry', '29077' : 'Greene', '29079' : 'Grundy', '29081' : 'Harrison', '29083' : 'Henry', '29085' : 'Hickory', '29087' : 'Holt', '29089' : 'Howard', '29091' : 'Howell', '29093' : 'Iron', '29095' : 'Jackson', '29097' : 'Jasper', '29099' : 'Jefferson', '29101' : 'Johnson', '29103' : 'Knox', '29105' : 'Laclede', '29107' : 'Lafayette', '29109' : 'Lawrence', '29111' : 'Lewis', '29113' : 'Lincoln', '29115' : 'Linn', '29117' : 'Livingston', '29119' : 'McDonald', '29121' : 'Macon', '29123' : 'Madison', '29125' : 'Maries', '29127' : 'Marion', '29129' : 'Mercer', '29131' : 'Miller', '29133' : 'Mississippi', '29135' : 'Moniteau', '29137' : 'Monroe', '29139' : 'Montgomery', '29141' : 'Morgan', '29143' : 'New Madrid', '29145' : 'Newton', '29147' : 'Nodaway', '29149' : 'Oregon', '29151' : 'Osage', '29153' : 'Ozark', '29155' : 'Pemiscot', '29157' : 'Perry', '29159' : 'Pettis', '29161' : 'Phelps', '29163' : 'Pike', '29165' : 'Platte', '29167' : 'Polk', '29169' : 'Pulaski', '29171' : 'Putnam', '29173' : 'Ralls', '29175' : 'Randolph', '29177' : 'Ray', '29179' : 'Reynolds', '29181' : 'Ripley', '29183' : 'Saint Charles', '29185' : 'Saint Clair', '29186' : 'Sainte Genevieve', '29187' : 'Saint Francois', '29189' : 'Saint Louis', '29195' : 
'Saline', '29197' : 'Schuyler', '29199' : 'Scotland', '29201' : 'Scott', '29203' : 'Shannon', '29205' : 'Shelby', '29207' : 'Stoddard', '29209' : 'Stone', '29211' : 'Sullivan', '29213' : 'Taney', '29215' : 'Texas', '29217' : 'Vernon', '29219' : 'Warren', '29221' : 'Washington', '29223' : 'Wayne', '29225' : 'Webster', '29227' : 'Worth', '29229' : 'Wright', '29510' : 'City of Saint Louis', '30001' : 'Beaverhead', '30003' : 'Big Horn', '30005' : 'Blaine', '30007' : 'Broadwater', '30009' : 'Carbon', '30011' : 'Carter', '30013' : 'Cascade', '30015' : 'Chouteau', '30017' : 'Custer', '30019' : 'Daniels', '30021' : 'Dawson', '30023' : 'Deer Lodge', '30025' : 'Fallon', '30027' : 'Fergus', '30029' : 'Flathead', '30031' : 'Gallatin', '30033' : 'Garfield', '30035' : 'Glacier', '30037' : 'Golden Valley', '30039' : 'Granite', '30041' : 'Hill', '30043' : 'Jefferson', '30045' : 'Judith Basin', '30047' : 'Lake', '30049' : 'Lewis and Clark', '30051' : 'Liberty', '30053' : 'Lincoln', '30055' : 'Mccone', '30057' : 'Madison', '30059' : 'Meagher', '30061' : 'Mineral', '30063' : 'Missoula', '30065' : 'Musselshell', '30067' : 'Park', '30069' : 'Petroleum', '30071' : 'Phillips', '30073' : 'Pondera', '30075' : 'Powder River', '30077' : 'Powell', '30079' : 'Prairie', '30081' : 'Ravalli', '30083' : 'Richland', '30085' : 'Roosevelt', '30087' : 'Rosebud', '30089' : 'Sanders', '30091' : 'Sheridan', '30093' : 'Silver Bow', '30095' : 'Stillwater', '30097' : 'Sweet Grass', '30099' : 'Teton', '30101' : 'Toole', '30103' : 'Treasure', '30105' : 'Valley', '30107' : 'Wheatland', '30109' : 'Wibaux', '30111' : 'Yellowstone', '31001' : 'Adams', '31003' : 'Antelope', '31005' : 'Arthur', '31007' : 'Banner', '31009' : 'Blaine', '31011' : 'Boone', '31013' : 'Box Butte', '31015' : 'Boyd', '31017' : 'Brown', '31019' : 'Buffalo', '31021' : 'Burt', '31023' : 'Butler', '31025' : 'Cass', '31027' : 'Cedar', '31029' : 'Chase', '31031' : 'Cherry', '31033' : 'Cheyenne', '31035' : 'Clay', '31037' : 'Colfax', '31039' : 'Cuming', '31041' : 'Custer', '31043' : 'Dakota', '31045' : 'Dawes', '31047' : 'Dawson', '31049' : 'Deuel', '31051' : 'Dixon', '31053' : 'Dodge', '31055' : 'Douglas', '31057' : 'Dundy', '31059' : 'Fillmore', '31061' : 'Franklin', '31063' : 'Frontier', '31065' : 'Furnas', '31067' : 'Gage', '31069' : 'Garden', '31071' : 'Garfield', '31073' : 'Gosper', '31075' : 'Grant', '31077' : 'Greeley', '31079' : 'Hall', '31081' : 'Hamilton', '31083' : 'Harlan', '31085' : 'Hayes', '31087' : 'Hitchcock', '31089' : 'Holt', '31091' : 'Hooker', '31093' : 'Howard', '31095' : 'Jefferson', '31097' : 'Johnson', '31099' : 'Kearney', '31101' : 'Keith', '31103' : 'Keya Paha', '31105' : 'Kimball', '31107' : 'Knox', '31109' : 'Lancaster', '31111' : 'Lincoln', '31113' : 'Logan', '31115' : 'Loup', '31117' : 'Mcpherson', '31119' : 'Madison', '31121' : 'Merrick', '31123' : 'Morrill', '31125' : 'Nance', '31127' : 'Nemaha', '31129' : 'Nuckolls', '31131' : 'Otoe', '31133' : 'Pawnee', '31135' : 'Perkins', '31137' : 'Phelps', '31139' : 'Pierce', '31141' : 'Platte', '31143' : 'Polk', '31145' : 'Red Willow', '31147' : 'Richardson', '31149' : 'Rock', '31151' : 'Saline', '31153' : 'Sarpy', '31155' : 'Saunders', '31157' : 'Scotts Bluff', '31159' : 'Seward', '31161' : 'Sheridan', '31163' : 'Sherman', '31165' : 'Sioux', '31167' : 'Stanton', '31169' : 'Thayer', '31171' : 'Thomas', '31173' : 'Thurston', '31175' : 'Valley', '31177' : 'Washington', '31179' : 'Wayne', '31181' : 'Webster', '31183' : 'Wheeler', '31185' : 'York', '32001' : 'Churchill', '32003' : 'Clark', '32005' : 
'Douglas', '32007' : 'Elko', '32009' : 'Esmeralda', '32011' : 'Eureka', '32013' : 'Humboldt', '32015' : 'Lander', '32017' : 'Lincoln', '32019' : 'Lyon', '32021' : 'Mineral', '32023' : 'Nye', '32027' : 'Pershing', '32029' : 'Storey', '32031' : 'Washoe', '32033' : 'White Pine', '32510' : 'City of Carson', '33001' : 'Belknap', '33003' : 'Carroll', '33005' : 'Cheshire', '33007' : 'Coos', '33009' : 'Grafton', '33011' : 'Hillsborough', '33013' : 'Merrimack', '33015' : 'Rockingham', '33017' : 'Strafford', '33019' : 'Sullivan', '34001' : 'Atlantic', '34003' : 'Bergen', '34005' : 'Burlington', '34007' : 'Camden', '34009' : 'Cape May', '34011' : 'Cumberland', '34013' : 'Essex', '34015' : 'Gloucester', '34017' : 'Hudson', '34019' : 'Hunterdon', '34021' : 'Mercer', '34023' : 'Middlesex', '34025' : 'Monmouth', '34027' : 'Morris', '34029' : 'Ocean', '34031' : 'Passaic', '34033' : 'Salem', '34035' : 'Somerset', '34037' : 'Sussex', '34039' : 'Union', '34041' : 'Warren', '35001' : 'Bernalillo', '35003' : 'Catron', '35005' : 'Chaves', '35006' : 'Cibola', '35007' : 'Colfax', '35009' : 'Curry', '35011' : 'Debaca', '35013' : 'Dona Ana', '35015' : 'Eddy', '35017' : 'Grant', '35019' : 'Guadalupe', '35021' : 'Harding', '35023' : 'Hidalgo', '35025' : 'Lea', '35027' : 'Lincoln', '35028' : 'Los Alamos', '35029' : 'Luna', '35031' : 'Mckinley', '35033' : 'Mora', '35035' : 'Otero', '35037' : 'Quay', '35039' : 'Rio Arriba', '35041' : 'Roosevelt', '35043' : 'Sandoval', '35045' : 'San Juan', '35047' : 'San Miguel', '35049' : 'Santa Fe', '35051' : 'Sierra', '35053' : 'Socorro', '35055' : 'Taos', '35057' : 'Torrance', '35059' : 'Union', '35061' : 'Valencia', '36001' : 'Albany', '36003' : 'Allegany', '36005' : 'Bronx', '36007' : 'Broome', '36009' : 'Cattaraugus', '36011' : 'Cayuga', '36013' : 'Chautauqua', '36015' : 'Chemung', '36017' : 'Chenango', '36019' : 'Clinton', '36021' : 'Columbia', '36023' : 'Cortland', '36025' : 'Delaware', '36027' : 'Dutchess', '36029' : 'Erie', '36031' : 'Essex', '36033' : 'Franklin', '36035' : 'Fulton', '36037' : 'Genesee', '36039' : 'Greene', '36041' : 'Hamilton', '36043' : 'Herkimer', '36045' : 'Jefferson', '36047' : 'Kings', '36049' : 'Lewis', '36051' : 'Livingston', '36053' : 'Madison', '36055' : 'Monroe', '36057' : 'Montgomery', '36059' : 'Nassau', '36061' : 'New York', '36063' : 'Niagara', '36065' : 'Oneida', '36067' : 'Onondaga', '36069' : 'Ontario', '36071' : 'Orange', '36073' : 'Orleans', '36075' : 'Oswego', '36077' : 'Otsego', '36079' : 'Putnam', '36081' : 'Queens', '36083' : 'Rensselaer', '36085' : 'Richmond', '36087' : 'Rockland', '36089' : 'Saint Lawrence', '36091' : 'Saratoga', '36093' : 'Schenectady', '36095' : 'Schoharie', '36097' : 'Schuyler', '36099' : 'Seneca', '36101' : 'Steuben', '36103' : 'Suffolk', '36105' : 'Sullivan', '36107' : 'Tioga', '36109' : 'Tompkins', '36111' : 'Ulster', '36113' : 'Warren', '36115' : 'Washington', '36117' : 'Wayne', '36119' : 'Westchester', '36121' : 'Wyoming', '36123' : 'Yates', '37001' : 'Alamance', '37003' : 'Alexander', '37005' : 'Alleghany', '37007' : 'Anson', '37009' : 'Ashe', '37011' : 'Avery', '37013' : 'Beaufort', '37015' : 'Bertie', '37017' : 'Bladen', '37019' : 'Brunswick', '37021' : 'Buncombe', '37023' : 'Burke', '37025' : 'Cabarrus', '37027' : 'Caldwell', '37029' : 'Camden', '37031' : 'Carteret', '37033' : 'Caswell', '37035' : 'Catawba', '37037' : 'Chatham', '37039' : 'Cherokee', '37041' : 'Chowan', '37043' : 'Clay', '37045' : 'Cleveland', '37047' : 'Columbus', '37049' : 'Craven', '37051' : 'Cumberland', '37053' : 'Currituck', '37055' 
: 'Dare', '37057' : 'Davidson', '37059' : 'Davie', '37061' : 'Duplin', '37063' : 'Durham', '37065' : 'Edgecombe', '37067' : 'Forsyth', '37069' : 'Franklin', '37071' : 'Gaston', '37073' : 'Gates', '37075' : 'Graham', '37077' : 'Granville', '37079' : 'Greene', '37081' : 'Guilford', '37083' : 'Halifax', '37085' : 'Harnett', '37087' : 'Haywood', '37089' : 'Henderson', '37091' : 'Hertford', '37093' : 'Hoke', '37095' : 'Hyde', '37097' : 'Iredell', '37099' : 'Jackson', '37101' : 'Johnston', '37103' : 'Jones', '37105' : 'Lee', '37107' : 'Lenoir', '37109' : 'Lincoln', '37111' : 'Mcdowell', '37113' : 'Macon', '37115' : 'Madison', '37117' : 'Martin', '37119' : 'Mecklenburg', '37121' : 'Mitchell', '37123' : 'Montgomery', '37125' : 'Moore', '37127' : 'Nash', '37129' : 'New Hanover', '37131' : 'Northampton', '37133' : 'Onslow', '37135' : 'Orange', '37137' : 'Pamlico', '37139' : 'Pasquotank', '37141' : 'Pender', '37143' : 'Perquimans', '37145' : 'Person', '37147' : 'Pitt', '37149' : 'Polk', '37151' : 'Randolph', '37153' : 'Richmond', '37155' : 'Robeson', '37157' : 'Rockingham', '37159' : 'Rowan', '37161' : 'Rutherford', '37163' : 'Sampson', '37165' : 'Scotland', '37167' : 'Stanly', '37169' : 'Stokes', '37171' : 'Surry', '37173' : 'Swain', '37175' : 'Transylvania', '37177' : 'Tyrrell', '37179' : 'Union', '37181' : 'Vance', '37183' : 'Wake', '37185' : 'Warren', '37187' : 'Washington', '37189' : 'Watauga', '37191' : 'Wayne', '37193' : 'Wilkes', '37195' : 'Wilson', '37197' : 'Yadkin', '37199' : 'Yancey', '38001' : 'Adams', '38003' : 'Barnes', '38005' : 'Benson', '38007' : 'Billings', '38009' : 'Bottineau', '38011' : 'Bowman', '38013' : 'Burke', '38015' : 'Burleigh', '38017' : 'Cass', '38019' : 'Cavalier', '38021' : 'Dickey', '38023' : 'Divide', '38025' : 'Dunn', '38027' : 'Eddy', '38029' : 'Emmons', '38031' : 'Foster', '38033' : 'Golden Valley', '38035' : 'Grand Forks', '38037' : 'Grant', '38039' : 'Griggs', '38041' : 'Hettinger', '38043' : 'Kidder', '38045' : 'Lamoure', '38047' : 'Logan', '38049' : 'Mchenry', '38051' : 'Mcintosh', '38053' : 'Mckenzie', '38055' : 'Mclean', '38057' : 'Mercer', '38059' : 'Morton', '38061' : 'Mountrail', '38063' : 'Nelson', '38065' : 'Oliver', '38067' : 'Pembina', '38069' : 'Pierce', '38071' : 'Ramsey', '38073' : 'Ransom', '38075' : 'Renville', '38077' : 'Richland', '38079' : 'Rolette', '38081' : 'Sargent', '38083' : 'Sheridan', '38085' : 'Sioux', '38087' : 'Slope', '38089' : 'Stark', '38091' : 'Steele', '38093' : 'Stutsman', '38095' : 'Towner', '38097' : 'Traill', '38099' : 'Walsh', '38101' : 'Ward', '38103' : 'Wells', '38105' : 'Williams', '39001' : 'Adams', '39003' : 'Allen', '39005' : 'Ashland', '39007' : 'Ashtabula', '39009' : 'Athens', '39011' : 'Auglaize', '39013' : 'Belmont', '39015' : 'Brown', '39017' : 'Butler', '39019' : 'Carroll', '39021' : 'Champaign', '39023' : 'Clark', '39025' : 'Clermont', '39027' : 'Clinton', '39029' : 'Columbiana', '39031' : 'Coshocton', '39033' : 'Crawford', '39035' : 'Cuyahoga', '39037' : 'Darke', '39039' : 'Defiance', '39041' : 'Delaware', '39043' : 'Erie', '39045' : 'Fairfield', '39047' : 'Fayette', '39049' : 'Franklin', '39051' : 'Fulton', '39053' : 'Gallia', '39055' : 'Geauga', '39057' : 'Greene', '39059' : 'Guernsey', '39061' : 'Hamilton', '39063' : 'Hancock', '39065' : 'Hardin', '39067' : 'Harrison', '39069' : 'Henry', '39071' : 'Highland', '39073' : 'Hocking', '39075' : 'Holmes', '39077' : 'Huron', '39079' : 'Jackson', '39081' : 'Jefferson', '39083' : 'Knox', '39085' : 'Lake', '39087' : 'Lawrence', '39089' : 'Licking', '39091' : 
'Logan', '39093' : 'Lorain', '39095' : 'Lucas', '39097' : 'Madison', '39099' : 'Mahoning', '39101' : 'Marion', '39103' : 'Medina', '39105' : 'Meigs', '39107' : 'Mercer', '39109' : 'Miami', '39111' : 'Monroe', '39113' : 'Montgomery', '39115' : 'Morgan', '39117' : 'Morrow', '39119' : 'Muskingum', '39121' : 'Noble', '39123' : 'Ottawa', '39125' : 'Paulding', '39127' : 'Perry', '39129' : 'Pickaway', '39131' : 'Pike', '39133' : 'Portage', '39135' : 'Preble', '39137' : 'Putnam', '39139' : 'Richland', '39141' : 'Ross', '39143' : 'Sandusky', '39145' : 'Scioto', '39147' : 'Seneca', '39149' : 'Shelby', '39151' : 'Stark', '39153' : 'Summit', '39155' : 'Trumbull', '39157' : 'Tuscarawas', '39159' : 'Union', '39161' : 'Van Wert', '39163' : 'Vinton', '39165' : 'Warren', '39167' : 'Washington', '39169' : 'Wayne', '39171' : 'Williams', '39173' : 'Wood', '39175' : 'Wyandot', '40001' : 'Adair', '40003' : 'Alfalfa', '40005' : 'Atoka', '40007' : 'Beaver', '40009' : 'Beckham', '40011' : 'Blaine', '40013' : 'Bryan', '40015' : 'Caddo', '40017' : 'Canadian', '40019' : 'Carter', '40021' : 'Cherokee', '40023' : 'Choctaw', '40025' : 'Cimarron', '40027' : 'Cleveland', '40029' : 'Coal', '40031' : 'Comanche', '40033' : 'Cotton', '40035' : 'Craig', '40037' : 'Creek', '40039' : 'Custer', '40041' : 'Delaware', '40043' : 'Dewey', '40045' : 'Ellis', '40047' : 'Garfield', '40049' : 'Garvin', '40051' : 'Grady', '40053' : 'Grant', '40055' : 'Greer', '40057' : 'Harmon', '40059' : 'Harper', '40061' : 'Haskell', '40063' : 'Hughes', '40065' : 'Jackson', '40067' : 'Jefferson', '40069' : 'Johnston', '40071' : 'Kay', '40073' : 'Kingfisher', '40075' : 'Kiowa', '40077' : 'Latimer', '40079' : 'Le Flore', '40081' : 'Lincoln', '40083' : 'Logan', '40085' : 'Love', '40087' : 'Mcclain', '40089' : 'Mccurtain', '40091' : 'Mcintosh', '40093' : 'Major', '40095' : 'Marshall', '40097' : 'Mayes', '40099' : 'Murray', '40101' : 'Muskogee', '40103' : 'Noble', '40105' : 'Nowata', '40107' : 'Okfuskee', '40109' : 'Oklahoma', '40111' : 'Okmulgee', '40113' : 'Osage', '40115' : 'Ottawa', '40117' : 'Pawnee', '40119' : 'Payne', '40121' : 'Pittsburg', '40123' : 'Pontotoc', '40125' : 'Pottawatomie', '40127' : 'Pushmataha', '40129' : 'Roger Mills', '40131' : 'Rogers', '40133' : 'Seminole', '40135' : 'Sequoyah', '40137' : 'Stephens', '40139' : 'Texas', '40141' : 'Tillman', '40143' : 'Tulsa', '40145' : 'Wagoner', '40147' : 'Washington', '40149' : 'Washita', '40151' : 'Woods', '40153' : 'Woodward', '41001' : 'Baker', '41003' : 'Benton', '41005' : 'Clackamas', '41007' : 'Clatsop', '41009' : 'Columbia', '41011' : 'Coos', '41013' : 'Crook', '41015' : 'Curry', '41017' : 'Deschutes', '41019' : 'Douglas', '41021' : 'Gilliam', '41023' : 'Grant', '41025' : 'Harney', '41027' : 'Hood River', '41029' : 'Jackson', '41031' : 'Jefferson', '41033' : 'Josephine', '41035' : 'Klamath', '41037' : 'Lake', '41039' : 'Lane', '41041' : 'Lincoln', '41043' : 'Linn', '41045' : 'Malheur', '41047' : 'Marion', '41049' : 'Morrow', '41051' : 'Multnomah', '41053' : 'Polk', '41055' : 'Sherman', '41057' : 'Tillamook', '41059' : 'Umatilla', '41061' : 'Union', '41063' : 'Wallowa', '41065' : 'Wasco', '41067' : 'Washington', '41069' : 'Wheeler', '41071' : 'Yamhill', '42001' : 'Adams', '42003' : 'Allegheny', '42005' : 'Armstrong', '42007' : 'Beaver', '42009' : 'Bedford', '42011' : 'Berks', '42013' : 'Blair', '42015' : 'Bradford', '42017' : 'Bucks', '42019' : 'Butler', '42021' : 'Cambria', '42023' : 'Cameron', '42025' : 'Carbon', '42027' : 'Centre', '42029' : 'Chester', '42031' : 'Clarion', '42033' : 
'Clearfield', '42035' : 'Clinton', '42037' : 'Columbia', '42039' : 'Crawford', '42041' : 'Cumberland', '42043' : 'Dauphin', '42045' : 'Delaware', '42047' : 'Elk', '42049' : 'Erie', '42051' : 'Fayette', '42053' : 'Forest', '42055' : 'Franklin', '42057' : 'Fulton', '42059' : 'Greene', '42061' : 'Huntingdon', '42063' : 'Indiana', '42065' : 'Jefferson', '42067' : 'Juniata', '42069' : 'Lackawanna', '42071' : 'Lancaster', '42073' : 'Lawrence', '42075' : 'Lebanon', '42077' : 'Lehigh', '42079' : 'Luzerne', '42081' : 'Lycoming', '42083' : 'Mckean', '42085' : 'Mercer', '42087' : 'Mifflin', '42089' : 'Monroe', '42091' : 'Montgomery', '42093' : 'Montour', '42095' : 'Northampton', '42097' : 'Northumberland', '42099' : 'Perry', '42101' : 'Philadelphia', '42103' : 'Pike', '42105' : 'Potter', '42107' : 'Schuylkill', '42109' : 'Snyder', '42111' : 'Somerset', '42113' : 'Sullivan', '42115' : 'Susquehanna', '42117' : 'Tioga', '42119' : 'Union', '42121' : 'Venango', '42123' : 'Warren', '42125' : 'Washington', '42127' : 'Wayne', '42129' : 'Westmoreland', '42131' : 'Wyoming', '42133' : 'York', '44001' : 'Bristol', '44003' : 'Kent', '44005' : 'Newport', '44007' : 'Providence', '44009' : 'Washington', '45001' : 'Abbeville', '45003' : 'Aiken', '45005' : 'Allendale', '45007' : 'Anderson', '45009' : 'Bamberg', '45011' : 'Barnwell', '45013' : 'Beaufort', '45015' : 'Berkeley', '45017' : 'Calhoun', '45019' : 'Charleston', '45021' : 'Cherokee', '45023' : 'Chester', '45025' : 'Chesterfield', '45027' : 'Clarendon', '45029' : 'Colleton', '45031' : 'Darlington', '45033' : 'Dillon', '45035' : 'Dorchester', '45037' : 'Edgefield', '45039' : 'Fairfield', '45041' : 'Florence', '45043' : 'Georgetown', '45045' : 'Greenville', '45047' : 'Greenwood', '45049' : 'Hampton', '45051' : 'Horry', '45053' : 'Jasper', '45055' : 'Kershaw', '45057' : 'Lancaster', '45059' : 'Laurens', '45061' : 'Lee', '45063' : 'Lexington', '45065' : 'Mccormick', '45067' : 'Marion', '45069' : 'Marlboro', '45071' : 'Newberry', '45073' : 'Oconee', '45075' : 'Orangeburg', '45077' : 'Pickens', '45079' : 'Richland', '45081' : 'Saluda', '45083' : 'Spartanburg', '45085' : 'Sumter', '45087' : 'Union', '45089' : 'Williamsburg', '45091' : 'York', '46003' : 'Aurora', '46005' : 'Beadle', '46007' : 'Bennett', '46009' : 'Bon Homme', '46011' : 'Brookings', '46013' : 'Brown', '46015' : 'Brule', '46017' : 'Buffalo', '46019' : 'Butte', '46021' : 'Campbell', '46023' : 'Charles Mix', '46025' : 'Clark', '46027' : 'Clay', '46029' : 'Codington', '46031' : 'Corson', '46033' : 'Custer', '46035' : 'Davison', '46037' : 'Day', '46039' : 'Deuel', '46041' : 'Dewey', '46043' : 'Douglas', '46045' : 'Edmunds', '46047' : 'Fall River', '46049' : 'Faulk', '46051' : 'Grant', '46053' : 'Gregory', '46055' : 'Haakon', '46057' : 'Hamlin', '46059' : 'Hand', '46061' : 'Hanson', '46063' : 'Harding', '46065' : 'Hughes', '46067' : 'Hutchinson', '46069' : 'Hyde', '46071' : 'Jackson', '46073' : 'Jerauld', '46075' : 'Jones', '46077' : 'Kingsbury', '46079' : 'Lake', '46081' : 'Lawrence', '46083' : 'Lincoln', '46085' : 'Lyman', '46087' : 'Mccook', '46089' : 'Mcpherson', '46091' : 'Marshall', '46093' : 'Meade', '46095' : 'Mellette', '46097' : 'Miner', '46099' : 'Minnehaha', '46101' : 'Moody', '46102' : 'Oglala Lakota', '46103' : 'Pennington', '46105' : 'Perkins', '46107' : 'Potter', '46109' : 'Roberts', '46111' : 'Sanborn', '46113' : 'Shannon', #deprecated 10/2015, use 46102 Oglala Lakota
'46115' : 'Spink', '46117' : 'Stanley', '46119' : 'Sully', '46121' : 'Todd', '46123' : 'Tripp', '46125' : 'Turner', '46127' :
'Union', '46129' : 'Walworth', '46135' : 'Yankton', '46137' : 'Ziebach', '47001' : 'Anderson', '47003' : 'Bedford', '47005' : 'Benton', '47007' : 'Bledsoe', '47009' : 'Blount', '47011' : 'Bradley', '47013' : 'Campbell', '47015' : 'Cannon', '47017' : 'Carroll', '47019' : 'Carter', '47021' : 'Cheatham', '47023' : 'Chester', '47025' : 'Claiborne', '47027' : 'Clay', '47029' : 'Cocke', '47031' : 'Coffee', '47033' : 'Crockett', '47035' : 'Cumberland', '47037' : 'Davidson', '47039' : 'Decatur', '47041' : 'Dekalb', '47043' : 'Dickson', '47045' : 'Dyer', '47047' : 'Fayette', '47049' : 'Fentress', '47051' : 'Franklin', '47053' : 'Gibson', '47055' : 'Giles', '47057' : 'Grainger', '47059' : 'Greene', '47061' : 'Grundy', '47063' : 'Hamblen', '47065' : 'Hamilton', '47067' : 'Hancock', '47069' : 'Hardeman', '47071' : 'Hardin', '47073' : 'Hawkins', '47075' : 'Haywood', '47077' : 'Henderson', '47079' : 'Henry', '47081' : 'Hickman', '47083' : 'Houston', '47085' : 'Humphreys', '47087' : 'Jackson', '47089' : 'Jefferson', '47091' : 'Johnson', '47093' : 'Knox', '47095' : 'Lake', '47097' : 'Lauderdale', '47099' : 'Lawrence', '47101' : 'Lewis', '47103' : 'Lincoln', '47105' : 'Loudon', '47107' : 'Mcminn', '47109' : 'Mcnairy', '47111' : 'Macon', '47113' : 'Madison', '47115' : 'Marion', '47117' : 'Marshall', '47119' : 'Maury', '47121' : 'Meigs', '47123' : 'Monroe', '47125' : 'Montgomery', '47127' : 'Moore', '47129' : 'Morgan', '47131' : 'Obion', '47133' : 'Overton', '47135' : 'Perry', '47137' : 'Pickett', '47139' : 'Polk', '47141' : 'Putnam', '47143' : 'Rhea', '47145' : 'Roane', '47147' : 'Robertson', '47149' : 'Rutherford', '47151' : 'Scott', '47153' : 'Sequatchie', '47155' : 'Sevier', '47157' : 'Shelby', '47159' : 'Smith', '47161' : 'Stewart', '47163' : 'Sullivan', '47165' : 'Sumner', '47167' : 'Tipton', '47169' : 'Trousdale', '47171' : 'Unicoi', '47173' : 'Union', '47175' : 'Van Buren', '47177' : 'Warren', '47179' : 'Washington', '47181' : 'Wayne', '47183' : 'Weakley', '47185' : 'White', '47187' : 'Williamson', '47189' : 'Wilson', '48001' : 'Anderson', '48003' : 'Andrews', '48005' : 'Angelina', '48007' : 'Aransas', '48009' : 'Archer', '48011' : 'Armstrong', '48013' : 'Atascosa', '48015' : 'Austin', '48017' : 'Bailey', '48019' : 'Bandera', '48021' : 'Bastrop', '48023' : 'Baylor', '48025' : 'Bee', '48027' : 'Bell', '48029' : 'Bexar', '48031' : 'Blanco', '48033' : 'Borden', '48035' : 'Bosque', '48037' : 'Bowie', '48039' : 'Brazoria', '48041' : 'Brazos', '48043' : 'Brewster', '48045' : 'Briscoe', '48047' : 'Brooks', '48049' : 'Brown', '48051' : 'Burleson', '48053' : 'Burnet', '48055' : 'Caldwell', '48057' : 'Calhoun', '48059' : 'Callahan', '48061' : 'Cameron', '48063' : 'Camp', '48065' : 'Carson', '48067' : 'Cass', '48069' : 'Castro', '48071' : 'Chambers', '48073' : 'Cherokee', '48075' : 'Childress', '48077' : 'Clay', '48079' : 'Cochran', '48081' : 'Coke', '48083' : 'Coleman', '48085' : 'Collin', '48087' : 'Collingsworth', '48089' : 'Colorado', '48091' : 'Comal', '48093' : 'Comanche', '48095' : 'Concho', '48097' : 'Cooke', '48099' : 'Coryell', '48101' : 'Cottle', '48103' : 'Crane', '48105' : 'Crockett', '48107' : 'Crosby', '48109' : 'Culberson', '48111' : 'Dallam', '48113' : 'Dallas', '48115' : 'Dawson', '48117' : 'Deaf Smith', '48119' : 'Delta', '48121' : 'Denton', '48123' : 'Dewitt', '48125' : 'Dickens', '48127' : 'Dimmit', '48129' : 'Donley', '48131' : 'Duval', '48133' : 'Eastland', '48135' : 'Ector', '48137' : 'Edwards', '48139' : 'Ellis', '48141' : 'El Paso', '48143' : 'Erath', '48145' : 'Falls', '48147' : 
'Fannin', '48149' : 'Fayette', '48151' : 'Fisher', '48153' : 'Floyd', '48155' : 'Foard', '48157' : 'Fort Bend', '48159' : 'Franklin', '48161' : 'Freestone', '48163' : 'Frio', '48165' : 'Gaines', '48167' : 'Galveston', '48169' : 'Garza', '48171' : 'Gillespie', '48173' : 'Glasscock', '48175' : 'Goliad', '48177' : 'Gonzales', '48179' : 'Gray', '48181' : 'Grayson', '48183' : 'Gregg', '48185' : 'Grimes', '48187' : 'Guadalupe', '48189' : 'Hale', '48191' : 'Hall', '48193' : 'Hamilton', '48195' : 'Hansford', '48197' : 'Hardeman', '48199' : 'Hardin', '48201' : 'Harris', '48203' : 'Harrison', '48205' : 'Hartley', '48207' : 'Haskell', '48209' : 'Hays', '48211' : 'Hemphill', '48213' : 'Henderson', '48215' : 'Hidalgo', '48217' : 'Hill', '48219' : 'Hockley', '48221' : 'Hood', '48223' : 'Hopkins', '48225' : 'Houston', '48227' : 'Howard', '48229' : 'Hudspeth', '48231' : 'Hunt', '48233' : 'Hutchinson', '48235' : 'Irion', '48237' : 'Jack', '48239' : 'Jackson', '48241' : 'Jasper', '48243' : 'Jeff Davis', '48245' : 'Jefferson', '48247' : 'Jim Hogg', '48249' : 'Jim Wells', '48251' : 'Johnson', '48253' : 'Jones', '48255' : 'Karnes', '48257' : 'Kaufman', '48259' : 'Kendall', '48261' : 'Kenedy', '48263' : 'Kent', '48265' : 'Kerr', '48267' : 'Kimble', '48269' : 'King', '48271' : 'Kinney', '48273' : 'Kleberg', '48275' : 'Knox', '48277' : 'Lamar', '48279' : 'Lamb', '48281' : 'Lampasas', '48283' : 'La Salle', '48285' : 'Lavaca', '48287' : 'Lee', '48289' : 'Leon', '48291' : 'Liberty', '48293' : 'Limestone', '48295' : 'Lipscomb', '48297' : 'Live Oak', '48299' : 'Llano', '48301' : 'Loving', '48303' : 'Lubbock', '48305' : 'Lynn', '48307' : 'Mcculloch', '48309' : 'Mclennan', '48311' : 'Mcmullen', '48313' : 'Madison', '48315' : 'Marion', '48317' : 'Martin', '48319' : 'Mason', '48321' : 'Matagorda', '48323' : 'Maverick', '48325' : 'Medina', '48327' : 'Menard', '48329' : 'Midland', '48331' : 'Milam', '48333' : 'Mills', '48335' : 'Mitchell', '48337' : 'Montague', '48339' : 'Montgomery', '48341' : 'Moore', '48343' : 'Morris', '48345' : 'Motley', '48347' : 'Nacogdoches', '48349' : 'Navarro', '48351' : 'Newton', '48353' : 'Nolan', '48355' : 'Nueces', '48357' : 'Ochiltree', '48359' : 'Oldham', '48361' : 'Orange', '48363' : 'Palo Pinto', '48365' : 'Panola', '48367' : 'Parker', '48369' : 'Parmer', '48371' : 'Pecos', '48373' : 'Polk', '48375' : 'Potter', '48377' : 'Presidio', '48379' : 'Rains', '48381' : 'Randall', '48383' : 'Reagan', '48385' : 'Real', '48387' : 'Red River', '48389' : 'Reeves', '48391' : 'Refugio', '48393' : 'Roberts', '48395' : 'Robertson', '48397' : 'Rockwall', '48399' : 'Runnels', '48401' : 'Rusk', '48403' : 'Sabine', '48405' : 'San Augustine', '48407' : 'San Jacinto', '48409' : 'San Patricio', '48411' : 'San Saba', '48413' : 'Schleicher', '48415' : 'Scurry', '48417' : 'Shackelford', '48419' : 'Shelby', '48421' : 'Sherman', '48423' : 'Smith', '48425' : 'Somervell', '48427' : 'Starr', '48429' : 'Stephens', '48431' : 'Sterling', '48433' : 'Stonewall', '48435' : 'Sutton', '48437' : 'Swisher', '48439' : 'Tarrant', '48441' : 'Taylor', '48443' : 'Terrell', '48445' : 'Terry', '48447' : 'Throckmorton', '48449' : 'Titus', '48451' : 'Tom Green', '48453' : 'Travis', '48455' : 'Trinity', '48457' : 'Tyler', '48459' : 'Upshur', '48461' : 'Upton', '48463' : 'Uvalde', '48465' : 'Val Verde', '48467' : 'Van Zandt', '48469' : 'Victoria', '48471' : 'Walker', '48473' : 'Waller', '48475' : 'Ward', '48477' : 'Washington', '48479' : 'Webb', '48481' : 'Wharton', '48483' : 'Wheeler', '48485' : 'Wichita', '48487' : 'Wilbarger', '48489' : 
'Willacy', '48491' : 'Williamson', '48493' : 'Wilson', '48495' : 'Winkler', '48497' : 'Wise', '48499' : 'Wood', '48501' : 'Yoakum', '48503' : 'Young', '48505' : 'Zapata', '48507' : 'Zavala', '49001' : 'Beaver', '49003' : 'Box Elder', '49005' : 'Cache', '49007' : 'Carbon', '49009' : 'Daggett', '49011' : 'Davis', '49013' : 'Duchesne', '49015' : 'Emery', '49017' : 'Garfield', '49019' : 'Grand', '49021' : 'Iron', '49023' : 'Juab', '49025' : 'Kane', '49027' : 'Millard', '49029' : 'Morgan', '49031' : 'Piute', '49033' : 'Rich', '49035' : 'Salt Lake', '49037' : 'San Juan', '49039' : 'Sanpete', '49041' : 'Sevier', '49043' : 'Summit', '49045' : 'Tooele', '49047' : 'Uintah', '49049' : 'Utah', '49051' : 'Wasatch', '49053' : 'Washington', '49055' : 'Wayne', '49057' : 'Weber', '50001' : 'Addison', '50003' : 'Bennington', '50005' : 'Caledonia', '50007' : 'Chittenden', '50009' : 'Essex', '50011' : 'Franklin', '50013' : 'Grand Isle', '50015' : 'Lamoille', '50017' : 'Orange', '50019' : 'Orleans', '50021' : 'Rutland', '50023' : 'Washington', '50025' : 'Windham', '50027' : 'Windsor', '51001' : 'Accomack', '51003' : 'Albemarle', '51005' : 'Alleghany', '51007' : 'Amelia', '51009' : 'Amherst', '51011' : 'Appomattox', '51013' : 'Arlington', '51015' : 'Augusta', '51017' : 'Bath', '51019' : 'Bedford', '51021' : 'Bland', '51023' : 'Botetourt', '51025' : 'Brunswick', '51027' : 'Buchanan', '51029' : 'Buckingham', '51031' : 'Campbell', '51033' : 'Caroline', '51035' : 'Carroll', '51036' : 'Charles City', '51037' : 'Charlotte', '51041' : 'Chesterfield', '51043' : 'Clarke', '51045' : 'Craig', '51047' : 'Culpeper', '51049' : 'Cumberland', '51051' : 'Dickenson', '51053' : 'Dinwiddie', '51057' : 'Essex', '51059' : 'Fairfax', '51061' : 'Fauquier', '51063' : 'Floyd', '51065' : 'Fluvanna', '51067' : 'Franklin', '51069' : 'Frederick', '51071' : 'Giles', '51073' : 'Gloucester', '51075' : 'Goochland', '51077' : 'Grayson', '51079' : 'Greene', '51081' : 'Greensville', '51083' : 'Halifax', '51085' : 'Hanover', '51087' : 'Henrico', '51089' : 'Henry', '51091' : 'Highland', '51093' : 'Isle of Wight', '51095' : 'James City', '51097' : 'King and Queen', '51099' : 'King George', '51101' : 'King William', '51103' : 'Lancaster', '51105' : 'Lee', '51107' : 'Loudoun', '51109' : 'Louisa', '51111' : 'Lunenburg', '51113' : 'Madison', '51115' : 'Mathews', '51117' : 'Mecklenburg', '51119' : 'Middlesex', '51121' : 'Montgomery', '51125' : 'Nelson', '51127' : 'New Kent', '51131' : 'Northampton', '51133' : 'Northumberland', '51135' : 'Nottoway', '51137' : 'Orange', '51139' : 'Page', '51141' : 'Patrick', '51143' : 'Pittsylvania', '51145' : 'Powhatan', '51147' : 'Prince Edward', '51149' : 'Prince George', '51153' : 'Prince William', '51155' : 'Pulaski', '51157' : 'Rappahannock', '51159' : 'Richmond', '51161' : 'Roanoke', '51163' : 'Rockbridge', '51165' : 'Rockingham', '51167' : 'Russell', '51169' : 'Scott', '51171' : 'Shenandoah', '51173' : 'Smyth', '51175' : 'Southampton', '51177' : 'Spotsylvania', '51179' : 'Stafford', '51181' : 'Surry', '51183' : 'Sussex', '51185' : 'Tazewell', '51187' : 'Warren', '51191' : 'Washington', '51193' : 'Westmoreland', '51195' : 'Wise', '51197' : 'Wythe', '51199' : 'York', '51510' : 'City of Alexandria', '51515' : 'City of Bedford', '51520' : 'City of Bristol', '51530' : 'City of Buena Vista', '51540' : 'City of Charlottesville', '51550' : 'City of Chesapeake', '51560' : 'City of Clifton Forge', '51570' : 'City of Colonial Heights', '51580' : 'City of Covington', '51590' : 'City of Danville', '51595' : 'City of Emporia', 
'51600' : 'City of Fairfax', '51610' : 'City of Falls Church', '51620' : 'City of Franklin', '51630' : 'City of Fredericksburg', '51640' : 'City of Galax', '51650' : 'City of Hampton', '51660' : 'City of Harrisonburg', '51670' : 'City of Hopewell', '51678' : 'City of Lexington', '51680' : 'City of Lynchburg', '51683' : 'City of Manassas', '51685' : 'City of Manassas Park', '51690' : 'City of Martinsville', '51700' : 'City of Newport News', '51710' : 'City of Norfolk', '51720' : 'City of Norton', '51730' : 'City of Petersburg', '51735' : 'City of Poquoson', '51740' : 'City of Portsmouth', '51750' : 'City of Radford', '51760' : 'City of Richmond', '51770' : 'City of Roanoke', '51775' : 'City of Salem', '51790' : 'City of Staunton', '51800' : 'City of Suffolk', '51810' : 'City of Virginia Beach', '51820' : 'City of Waynesboro', '51830' : 'City of Williamsburg', '51840' : 'City of Winchester', '53001' : 'Adams', '53003' : 'Asotin', '53005' : 'Benton', '53007' : 'Chelan', '53009' : 'Clallam', '53011' : 'Clark', '53013' : 'Columbia', '53015' : 'Cowlitz', '53017' : 'Douglas', '53019' : 'Ferry', '53021' : 'Franklin', '53023' : 'Garfield', '53025' : 'Grant', '53027' : 'Grays Harbor', '53029' : 'Island', '53031' : 'Jefferson', '53033' : 'King', '53035' : 'Kitsap', '53037' : 'Kittitas', '53039' : 'Klickitat', '53041' : 'Lewis', '53043' : 'Lincoln', '53045' : 'Mason', '53047' : 'Okanogan', '53049' : 'Pacific', '53051' : 'Pend Oreille', '53053' : 'Pierce', '53055' : 'San Juan', '53057' : 'Skagit', '53059' : 'Skamania', '53061' : 'Snohomish', '53063' : 'Spokane', '53065' : 'Stevens', '53067' : 'Thurston', '53069' : 'Wahkiakum', '53071' : 'Walla Walla', '53073' : 'Whatcom', '53075' : 'Whitman', '53077' : 'Yakima', '54001' : 'Barbour', '54003' : 'Berkeley', '54005' : 'Boone', '54007' : 'Braxton', '54009' : 'Brooke', '54011' : 'Cabell', '54013' : 'Calhoun', '54015' : 'Clay', '54017' : 'Doddridge', '54019' : 'Fayette', '54021' : 'Gilmer', '54023' : 'Grant', '54025' : 'Greenbrier', '54027' : 'Hampshire', '54029' : 'Hancock', '54031' : 'Hardy', '54033' : 'Harrison', '54035' : 'Jackson', '54037' : 'Jefferson', '54039' : 'Kanawha', '54041' : 'Lewis', '54043' : 'Lincoln', '54045' : 'Logan', '54047' : 'Mcdowell', '54049' : 'Marion', '54051' : 'Marshall', '54053' : 'Mason', '54055' : 'Mercer', '54057' : 'Mineral', '54059' : 'Mingo', '54061' : 'Monongalia', '54063' : 'Monroe', '54065' : 'Morgan', '54067' : 'Nicholas', '54069' : 'Ohio', '54071' : 'Pendleton', '54073' : 'Pleasants', '54075' : 'Pocahontas', '54077' : 'Preston', '54079' : 'Putnam', '54081' : 'Raleigh', '54083' : 'Randolph', '54085' : 'Ritchie', '54087' : 'Roane', '54089' : 'Summers', '54091' : 'Taylor', '54093' : 'Tucker', '54095' : 'Tyler', '54097' : 'Upshur', '54099' : 'Wayne', '54101' : 'Webster', '54103' : 'Wetzel', '54105' : 'Wirt', '54107' : 'Wood', '54109' : 'Wyoming', '55001' : 'Adams', '55003' : 'Ashland', '55005' : 'Barron', '55007' : 'Bayfield', '55009' : 'Brown', '55011' : 'Buffalo', '55013' : 'Burnett', '55015' : 'Calumet', '55017' : 'Chippewa', '55019' : 'Clark', '55021' : 'Columbia', '55023' : 'Crawford', '55025' : 'Dane', '55027' : 'Dodge', '55029' : 'Door', '55031' : 'Douglas', '55033' : 'Dunn', '55035' : 'Eau Claire', '55037' : 'Florence', '55039' : 'Fond du Lac', '55041' : 'Forest', '55043' : 'Grant', '55045' : 'Green', '55047' : 'Green Lake', '55049' : 'Iowa', '55051' : 'Iron', '55053' : 'Jackson', '55055' : 'Jefferson', '55057' : 'Juneau', '55059' : 'Kenosha', '55061' : 'Kewaunee', '55063' : 'La Crosse', '55065' : 'Lafayette', 
'55067' : 'Langlade', '55069' : 'Lincoln', '55071' : 'Manitowoc', '55073' : 'Marathon', '55075' : 'Marinette', '55077' : 'Marquette', '55078' : 'Menominee', '55079' : 'Milwaukee', '55081' : 'Monroe', '55083' : 'Oconto', '55085' : 'Oneida', '55087' : 'Outagamie', '55089' : 'Ozaukee', '55091' : 'Pepin', '55093' : 'Pierce', '55095' : 'Polk', '55097' : 'Portage', '55099' : 'Price', '55101' : 'Racine', '55103' : 'Richland', '55105' : 'Rock', '55107' : 'Rusk', '55109' : 'Saint Croix', '55111' : 'Sauk', '55113' : 'Sawyer', '55115' : 'Shawano', '55117' : 'Sheboygan', '55119' : 'Taylor', '55121' : 'Trempealeau', '55123' : 'Vernon', '55125' : 'Vilas', '55127' : 'Walworth', '55129' : 'Washburn', '55131' : 'Washington', '55133' : 'Waukesha', '55135' : 'Waupaca', '55137' : 'Waushara', '55139' : 'Winnebago', '55141' : 'Wood', '56001' : 'Albany', '56003' : 'Big Horn', '56005' : 'Campbell', '56007' : 'Carbon', '56009' : 'Converse', '56011' : 'Crook', '56013' : 'Fremont', '56015' : 'Goshen', '56017' : 'Hot Springs', '56019' : 'Johnson', '56021' : 'Laramie', '56023' : 'Lincoln', '56025' : 'Natrona', '56027' : 'Niobrara', '56029' : 'Park', '56031' : 'Platte', '56033' : 'Sheridan', '56035' : 'Sublette', '56037' : 'Sweetwater', '56039' : 'Teton', '56041' : 'Uinta', '56043' : 'Washakie', '56045' : 'Weston', '60010' : 'Eastern District', '60020' : 'Manu\'a District', '60030' : 'Rose Island', '60040' : 'Swains Island', '60050' : 'Western District', '66010' : 'Guam', '69085' : 'Northern Islands', '69100' : 'Rota', '69110' : 'Saipan', '69120' : 'Tinian', '72001' : 'Adjuntas', '72003' : 'Aguada', '72005' : 'Aguadilla', '72007' : 'Aguas Buenas', '72009' : 'Aibonito', '72011' : 'Anasco', '72013' : 'Arecibo', '72015' : 'Arroyo', '72017' : 'Barceloneta', '72019' : 'Barranquitas', '72021' : 'Bayamon', '72023' : 'Cabo Rojo', '72025' : 'Caguas', '72027' : 'Camuy', '72029' : 'Canovanas', '72031' : 'Carolina', '72033' : 'Catano', '72035' : 'Cayey', '72037' : 'Ceiba', '72039' : 'Ciales', '72041' : 'Cidra', '72043' : 'Coamo', '72045' : 'Comerio', '72047' : 'Corozal', '72049' : 'Culebra', '72051' : 'Dorado', '72053' : 'Fajardo', '72054' : 'Florida', '72055' : 'Guanica', '72057' : 'Guayama', '72059' : 'Guayanilla', '72061' : 'Guaynabo', '72063' : 'Gurabo', '72065' : 'Hatillo', '72067' : 'Hormigueros', '72069' : 'Humacao', '72071' : 'Isabela', '72073' : 'Jayuya', '72075' : 'Juana Diaz', '72077' : 'Juncos', '72079' : 'Lajas', '72081' : 'Lares', '72083' : 'Las Marias', '72085' : 'Las Piedras', '72087' : 'Loiza', '72089' : 'Luquillo', '72091' : 'Manati', '72093' : 'Maricao', '72095' : 'Maunabo', '72097' : 'Mayaguez', '72099' : 'Moca', '72101' : 'Morovis', '72103' : 'Naguabo', '72105' : 'Naranjito', '72107' : 'Orocovis', '72109' : 'Patillas', '72111' : 'Penuelas', '72113' : 'Ponce', '72115' : 'Quebradillas', '72117' : 'Rincon', '72119' : 'Rio Grande', '72121' : 'Sabana Grande', '72123' : 'Salinas', '72125' : 'San German', '72127' : 'San Juan', '72129' : 'San Lorenzo', '72131' : 'San Sebastian', '72133' : 'Santa Isabel', '72135' : 'Toa Alta', '72137' : 'Toa Baja', '72139' : 'Trujillo Alto', '72141' : 'Utuado', '72143' : 'Vega Alta', '72145' : 'Vega Baja', '72147' : 'Vieques', '72149' : 'Villalba', '72151' : 'Yabucoa', '72153' : 'Yauco', '78010' : 'Saint Croix', '78020' : 'Saint John', '78030' : 'Saint Thomas', '64002' : 'Chuuk*', '64005' : 'Kosrae', '64040' : 'Pohnpei*', '64060' : 'Yap', '68007' : 'Ailinginae', '68010' : 'Ailinglaplap', '68030' : 'Ailuk', '68040' : 'Arno', '68050' : 'Aur', '68060' : 'Bikar', '68070' : 'Bikini',
'68073' : 'Bokak', '68080' : 'Ebon', '68090' : 'Enewetak', '68100' : 'Erikub', '68110' : 'Jabat', '68120' : 'Jaluit', '68130' : 'Jemo', '68140' : 'Kili', '68150' : 'Kwajalein', '68160' : 'Lae', '68170' : 'Lib', '68180' : 'Likiep', '68190' : 'Majuro', '68300' : 'Maloelap', '68310' : 'Mejit', '68320' : 'Mili', '68330' : 'Namorik', '68340' : 'Namu', '68350' : 'Rongelap', '68360' : 'Rongrik', '68385' : 'Toke', '68390' : 'Ujae', '68400' : 'Ujelang', '68410' : 'Utrik', '68420' : 'Wotho', '68430' : 'Wotje', '70002' : 'Aimeliik', '70004' : 'Airai', '70010' : 'Angaur', '70050' : 'Hatohobei*', '70100' : 'Kayangel', '70150' : 'Koror', '70212' : 'Melekeok*', '70214' : 'Ngaraard', '70218' : 'Ngarchelong', '70222' : 'Ngardmau', '70224' : 'Ngatpang', '70226' : 'Ngchesar', '70227' : 'Ngernmlengui*', '70228' : 'Ngiwal', '70350' : 'Peleliu', '70370' : 'Sonsorol', '74050' : 'Baker Island', '74100' : 'Howland Island', '74150' : 'Jarvis Island', '74200' : 'Johnston Island', '74250' : 'Kingman Reef', '74300' : 'Midway Islands', '74350' : 'Navassa Island', '74400' : 'Palmyra Atoll', '74450' : 'Wake Island',
#Marine Locations
'75610' : 'Lake Okeechobee', '77657' : 'Coastal waters from East Cape Sable to Chokoloskee FL out 20 NM', '77656' : 'Coastal waters from Chokoloskee to Bonita Beach FL out 20 NM', '77853' : 'Coastal waters from Englewood to Tarpon Springs FL out 20 NM', '77830' : 'Tampa Bay waters', '77250' : 'Coastal waters from Baffin Bay to Port Aransas out 20 NM', '77255' : 'Coastal waters from Port Aransas to Matagorda Ship Channel out 20 NM', '77155' : 'Coastal waters from Baffin Bay to Port Mansfield TX out 20 NM', '77135' : 'Laguna Madre From 5 nm North of Port Mansfield To Baffin Bay TX', '77150' : 'Coastal waters from Port Mansfield TX to the Rio Grande River out 20 NM', '77132' : 'Laguna Madre From The Arroyo Colorado To 5 NM North of Port Mansfield TX', '77130' : 'Laguna Madre From the Port of Brownsville to the Arroyo Colorado', '75671' : 'Waters from Deerfield Beach to Ocean Reef FL from 20 to 60 NM excluding the territorial waters of Bahamas', '77676' : 'Waters from Chokoloskee to Bonita Beach FL from 20 to 60 NM', '77876' : 'Waters from Bonita Beach to Englewood FL out 20 to 60 NM', '77873' : 'Waters from Englewood to Tarpon Springs FL out 20 to 60 NM', '77170' : 'Waters from Port Mansfield TX to the Rio Grande River from 20 to 60 NM', '77175' : 'Waters from Baffin Bay to Port Mansfield TX from 20 to 60 NM', '77270' : 'Waters from Baffin Bay to Port Aransas from 20 to 60 NM', '77275' : 'Waters from Port Aransas to Matagorda Ship Channel from 20 to 60 NM', '77370' : 'Waters from Freeport to Matagorda Ship Channel TX from 20 to 60 NM', '75712' : 'Coastal Waters of Northern Puerto Rico out 10 NM', '75725' : 'Coastal Waters of Southern USVI, Vieques, and Eastern Puerto Rico out 10 NM', '75745' : 'Coastal Waters OF Southwestern Puerto Rico out 10 NM', '75735' : 'Coastal Waters of Southern Puerto Rico out 10 NM', '75742' : 'Coastal Waters OF Northwestern Puerto Rico out 10 NM', '75741' : 'Mona Passage Southward to 17N', '75732' : 'Caribbean Waters of Puerto Rico from 10 NM to 17N', '75651' : 'Coastal waters from Deerfield Beach to Ocean Reef FL out 20 NM', '59124' : 'Big Island Southeast Waters', '59123' : 'Big Island Leeward Waters', '59122' : 'Big Island Windward Waters', '59119' : 'Maalaea Bay', '59118' : 'Maui County Leeward Waters', '59120' : 'Pailolo Channel', '59116' : 'Kaiwi Channel', '59115' : 'Oahu Leeward Waters', '59113' : 'Kauai Channel', '59114' : 'Oahu Windward Waters', '59112' : 
'Kauai Leeward Waters', '59111' : 'Kauai Windward Waters', '59110' : 'Kauai Northwest Waters', '75710' : 'Atlantic Waters of Puerto Rico AND USVI from 10 NM to 19.5N', '75722' : 'Anegada Passage Southward to 17N', '75715' : 'Coastal Waters of Northern USVI and Culebra out 10 NM', '61150' : 'Coastal waters of Tututila and Aunuu', '61151' : 'Coastal waters of Manua', '75650' : 'Coastal waters from Jupiter Inlet to Deerfield Beach FL out 20 NM', '75630' : 'Biscayne Bay', '77052' : 'Straits of Florida from Ocean Reef to Craig Key out 20 NM', '77031' : 'Florida Bay including Barnes Sound, Blackwater Sound, and Buttonwood Sound', '77053' : 'Straits of Florida from Craig Key to west end of Seven Mile Bridge out 20 NM', '77054' : 'Straits of Florida from west end of Seven Mile Bridge to south of Halfmoon Shoal out 20 NM', '77075' : 'Straits of Florida from Halfmoon Shoal to 20 NM west of Dry Tortugas 20 to 60 NM out', '77073' : 'Straits of Florida from Craig Key to west end of Seven Mile Bridge 20 to 60 NM out', '77072' : 'Straits of Florida from Ocean Reef to Craig Key 20 to 60 NM out', '77033' : 'Gulf waters from East Cape Sable to Chokoloskee 20 to 60 NM out and beyond 5 fathoms', '77042' : 'Hawk Channel from Ocean Reef to Craig Key out to the reef', '77044' : 'Hawk Channel from west end of Seven Mile Bridge to Halfmoon Shoal out to the reef', '77074' : 'Straits of Florida from west end of Seven Mile Bridge to south of Halfmoon Shoal 20 to 60 NM out', '77034' : 'Gulf of Mexico including Dry Tortugas and Rebecca Shoal Channel', '77055' : 'Straits of Florida from Halfmoon Shoal to 20 NM west of Dry Tortugas out 20 NM', '77043' : 'Hawk Channel from Craig Key to west end of Seven Mile Bridge out to the reef', '77035' : 'Gulf of Mexico from West End of Seven Mile Bridge to Halfmoon Shoal out to 5 Fathoms', '77230' : 'Bays and Waterways from Baffin Bay to Port Aransas', '65151' : 'Guam Coastal Waters', '65152' : 'Rota Coastal Waters', '65153' : 'Tinian Coastal Waters', '65161' : 'Koror Palau Coastal Waters', '65171' : 'Yap Coastal Waters', '65172' : 'Chuuk Coastal Waters', '65173' : 'Pohnpei Coastal Waters', '65174' : 'Kosrae Coastal Waters', '65181' : 'Majuro Coastal Waters', '65191' : 'Waters out to 40 Nautical Miles', '73530' : 'Chesapeake Bay north of Pooles Island MD', '73430' : 'Delaware Bay waters north of East Point NJ to Slaughter Beach DE', '73431' : 'Delaware Bay waters south of East Point NJ to Slaughter Beach DE', '73454' : 'Coastal waters from Cape May NJ to Cape Henlopen DE out 20 nm', '73455' : 'Coastal waters from Cape Henlopen to Fenwick Island DE out 20 nm', '73650' : 'Coastal waters from Fenwick Island DE to Chincoteague VA out 20 nm', '73652' : 'Coastal waters from Chincoteague to Parramore Island VA out 20 nm', '73654' : 'Coastal waters from Parramore Island to Cape Charles Light VA out 20 nm', '73538' : 'Patapsco River including Baltimore Harbor', '73531' : 'Chesapeake Bay from Pooles Island to Sandy Point MD', '73539' : 'Chester River to Queenstown MD', '73540' : 'Eastern Bay', '73533' : 'Chesapeake Bay from North Beach to Drum Point MD', '73534' : 'Chesapeake Bay from Drum Point MD to Smith Point VA', '73535' : 'Tidal Potomac from Key Bridge to Indian Head MD', '73536' : 'Tidal Potomac from Indian Head to Cobb Island MD', '73630' : 'Chesapeake Bay from Smith Point to Windmill Point VA', '73537' : 'Tidal Potomac from Cobb Island MD to Smith Point VA', '73631' : 'Chesapeake Bay from Windmill Point to New Point Comfort VA', '73632' : 'Chesapeake Bay from New Point Comfort to 
Little Creek VA', '73532' : 'Chesapeake Bay from Sandy Point to North Beach MD', '73453' : 'Coastal waters from Great Egg Inlet to Cape May NJ out 20 nm', '73452' : 'Coastal waters from Little Egg Inlet to Great Egg Inlet NJ out 20 nm', '73450' : 'Coastal waters from Sandy Hook to Manasquan Inlet NJ out 20 nm', '73338' : 'New York Harbor', '73353' : 'Fire Island Inlet NY to Moriches Inlet NY out 20 nm', '73350' : 'Moriches Inlet NY to Montauk Point NY out 20 nm', '73256' : 'Coastal Waters from Montauk NY to Marthas Vineyard extending out to 20 nm South of Block Island', '73340' : 'Peconic and Gardiners Bays', '73335' : 'Long Island Sound West of New Haven CT/Port Jefferson NY', '73235' : 'Rhode Island Sound', '73232' : 'Nantucket Sound', '73255' : 'Coastal Waters extending out to 25 nm South of Marthas Vineyard and Nantucket', '73254' : 'Coastal waters from Provincetown MA to Chatham MA to Nantucket MA out 20 nm', '73231' : 'Cape Cod Bay', '73230' : 'Boston Harbor', '73251' : 'Massachusetts Bay and Ipswich Bay', '73250' : 'Coastal waters east of Ipswich Bay and the Stellwagen Bank National Marine Sanctuary', '73153' : 'Casco Bay', '73152' : 'Coastal Waters from Port Clyde, ME to Cape Elizabeth, ME out 25 NM', '73150' : 'Coastal Waters from Stonington, ME to Port Clyde, ME out 25 NM', '73051' : 'Coastal Waters from Schoodic Point, ME to Stonington, ME out 25 NM', '73050' : 'Coastal Waters from Eastport, ME to Schoodic Point, ME out 25 NM', '73151' : 'Penobscot Bay', '73633' : 'Currituck Sound', '75250' : 'Coastal waters from Surf City to Cape Fear NC out 20 nm', '75254' : 'Coastal waters from Little River Inlet to Murrells Inlet SC out 20 nm', '75350' : 'Coastal waters from South Santee River to Edisto Beach SC out 20 nm', '75330' : 'Charleston Harbor', '75452' : 'Coastal waters from Fernandina Beach to St. Augustine FL out 20 NM', '75454' : 'Coastal waters from St. Augustine to Flagler Beach FL out 20 NM', '77765' : 'Coastal waters from Suwannee River to Keaton Beach out 20 NM', '77730' : 'Apalachee Bay or Coastal Waters From Keaton Beach to Ochlockonee River Fl out to 20 Nm', '77755' : 'Coastal Waters From Ochlockonee River to Apalachicola Fl out to 20 Nm', '77750' : 'Coastal waters from Apalachicola to Destin FL out 20 NM', '77530' : 'Lake Pontchartrain and Lake Maurepas', '77435' : 'Vermilion Bay', '77452' : 'Coastal waters from Intracoastal City to Cameron LA out 20 NM', '77432' : 'Calcasieu Lake', '77450' : 'Coastal waters from Cameron LA to High Island TX out 20 NM', '77430' : 'Sabine Lake', '57750' : 'Coastal Waters from San Mateo Point to the Mexican Border and out to 30 nm', '57565' : 'Coastal Waters from Point Pinos to Point Piedras Blancas California out to 10 nm', '57576' : 'Waters from Point Pinos to Point Piedras Blancas 10-60 NM', '57560' : 'Coastal Waters from Pigeon Point to Point Pinos California out to 10 nm', '57535' : 'Monterey Bay', '57545' : 'Coastal Waters from Point Reyes to Pigeon Point California out to 10 nm', '57530' : 'San Pablo Bay, Suisun Bay, the West Delta and the San Francisco Bay north of the Bay Bridge', '57540' : 'Coastal Waters from Point Arena to Point Reyes California out to 10 nm', '57570' : 'Waters from Point Arena to Point Reyes 10-60 NM', '57455' : 'Coastal waters from Cape Mendocino to Pt. Arena CA out 10 nm', '57475' : 'Waters from Cape Mendocino to Pt. Arena CA from 10 to 60 nm', '57450' : 'Coastal waters from Pt. St. George to Cape Mendocino CA out 10 nm', '57410' : 'Humboldt Bay Bar', '57470' : 'Waters from Pt. St. 
George to Cape Mendocino CA from 10 to 60 nm', '57376' : 'Waters from Cape Blanco OR to Pt. St. George CA from 10 to 60 nm', '57350' : 'Coastal waters from Florence to Cape Blanco OR out 10 nm', '57370' : 'Waters from Florence to Cape Blanco OR from 10 to 60 nm', '57255' : 'Coastal waters from Cascade Head to Florence OR out 10 nm', '57275' : 'Waters from Cascade Head to Florence OR from 10 to 60 nm', '57210' : 'Columbia River Bar', '57250' : 'Coastal waters from Cape Shoalwater WA to Cascade Head OR out 10 nm', '57270' : 'Waters from Cape Shoalwater WA to Cascade Head OR from 10 to 60 nm', '57156' : 'Coastal Waters From Point Grenville To Cape Shoalwater Out 10 Nm', '57176' : 'Coastal Waters From Point Grenville To Cape Shoalwater 10 To 60 Nm', '57173' : 'Waters From James Island To Point Grenville 10 To 60 Nm', '57150' : 'Coastal Waters From Cape Flattery To James Island Out 10 Nm', '57170' : 'Coastal Waters From Cape Flattery To James Island 10 To 60 Nm', '57130' : 'West Entrance U.S. Waters Strait of Juan De Fuca', '57131' : 'Central U.S. Waters Strait of Juan De Fuca', '57132' : 'East Entrance U.S. Waters Strait of Juan De Fuca', '57134' : 'Admiralty Inlet', '91144' : 'Two Harbors to Duluth MN', '91145' : 'Duluth MN to Port Wing WI', '91143' : 'Silver Bay Harbor to Two Harbors MN', '91142' : 'Taconite Harbor to Silver Bay Harbor MN', '91141' : 'Grand Marais to Taconite Harbor MN', '91140' : 'Grand Portage to Grand Marais MN', '91146' : 'Port Wing to Sand Island WI', '91147' : 'Sand Island to Bayfield WI', '91121' : 'Chequamegon Bay-Bayfield to Oak Point WI', '91148' : 'Oak Point to Saxon Harbor WI', '91240' : 'Saxon Harbor WI to Black River MI', '91241' : 'Black River To Ontonagon MI', '91242' : 'Ontonagon to Upper Entrance of Portage Canal MI', '91243' : 'Upper Entrance of Portage Canal to Eagle River MI', '91244' : 'Eagle River to Manitou Island MI', '91245' : 'Manitou Island to Point Isabelle MI', '91246' : 'Point Isabelle to Lower Entrance of Portage Canal MI', '91247' : 'Lower Entrance of Portage Canal To Huron Islands MI Including Keweenaw and Huron Bays', '91248' : 'Huron Islands to Marquette MI', '91249' : 'Marquette to Munising MI', '91250' : 'Munising to Grand Marais MI', '91266' : 'Lake Superior East of a line from Manitou Island to Marquette MI and West of a line from Grand Marais MI to the US/Canadian Border Beyond 5 NM from shore', '91251' : 'Grand Marais to Whitefish Point MI', '91267' : 'Lake Superior from Grand Marais MI to Whitefish Point MI 5 NM off shore to the US/Canadian border', '91321' : 'Whitefish Bay (U.S. Portion)/Whitefish Point to Point Iroquois MI', '91322' : 'St. Marys River Point Iroquois to E. Potagannissing Bay', '92743' : 'Calumet Harbor IL to Gary IN', '92742' : 'Northerly Island to Calumet Harbor IL', '92741' : 'Wilmette Harbor to Northerly Island IL', '92740' : 'Winthrop Harbor to Wilmette Harbor IL', '92745' : 'Burns Harbor to Michigan City IN', '92043' : 'New Buffalo MI to St Joseph MI', '92779' : 'Lake Michigan from Wilmette Harbor to Michigan City IN 5 NM offshore to Mid Lake', '92080' : 'Lake Michigan Michigan City IN to St. 
Joseph MI 5 NM offshore to mid-line of lake.', '92777' : 'Lake Michigan from Winthrop Harbor to Wilmette Harbor IL 5 NM offshore to Mid Lake', '92878' : 'Lake Michigan from St Joseph to South Haven MI 5 NM offshore to Mid Lake', '92646' : 'Wind Point WI to Winthrop Harbor IL', '92645' : 'North Point Light to Wind Point WI', '92644' : 'Port Washington to North Point Light WI', '92643' : 'Sheboygan to Port Washington WI', '92543' : 'Two Rivers to Sheboygan WI', '92845' : 'South Haven to Holland MI', '92675' : 'Lake Michigan from Wind Point WI to Winthrop Harbor IL 5 NM offshore to Mid Lake', '92876' : 'Lake Michigan from South Haven to Holland MI 5 NM offshore to Mid lake', '92673' : 'Lake Michigan from North Point Light to Wind Point WI 5 NM offshore to Mid Lake', '92671' : 'Lake Michigan from Port Washington to North Point Light WI 5 NM offshore to Mid Lake', '92669' : 'Lake Michigan from Sheboygan to Port Washington WI 5 NM offshore to Mid Lake', '92870' : 'Lake Michigan from Whitehall to Pentwater MI 5 NM offshore to Mid Lake', '92567' : 'Lake Michigan from Two Rivers to Sheboygan WI 5 NM offshore to Mid Lake', '92868' : 'Lake Michigan from Pentwater to Manistee MI 5 NM offshore to Mid Lake', '92542' : 'Sturgeon Bay to Two Rivers WI', '92565' : 'Lake Michigan from Sturgeon Bay to Two Rivers WI 5 NM offshore to Mid Lake', '92366' : 'Lake Michigan from Point Betsie to Manistee MI 5 NM offshore to Mid Lake', '92346' : 'Manistee to Point Betsie MI', '92744' : 'Gary to Burns Harbor IN', '92046' : 'Michigan City IN to New Buffalo MI', '92844' : 'St Joseph to South Haven MI', '92522' : 'Green Bay south of line from Oconto WI to Little Sturgeon Bay WI', '92521' : 'Green Bay south of line from Cedar River to Rock Island Passage and north of a line from Oconto WI to Little Sturgeon Bay WI', '92221' : 'Green Bay North of line from Cedar River MI to Rock Island Passage', '92541' : 'Rock Island Passage to Sturgeon Bay WI', '92250' : '5NM East of a line from Fairport MI to Rock Island Passage', '92563' : 'Lake Michigan from Rock Island Passage to Sturgeon Bay WI 5 NM offshore to mid lake', '92248' : 'Seul Choix Point to Point Detour MI', '92364' : 'Lake Michigan from Charlevoix to Point Betsie MI 5 NM Offshore to mid lake', '92261' : 'Lake Michigan from Seul Choix Point to Rock Island Passage 5 NM offshore to Mid Lake', '92362' : 'Lake Michigan South of a line from Seul Choix Point to the Mackinac Bridge and North of a line from Charlevoix MI to South Fox Island 5 NM offshore', '92344' : 'Sleeping Bear Point to Grand Traverse Light MI', '92342' : 'Norwood MI to 5 NM West of Mackinac Bridge including Little Traverse Bay', '93361' : 'Lake Huron from 5 NM east of Mackinac Bridge to Presque Isle Lt to the US/Canadian border beyond 5 NM from shore', '93362' : 'Lake Huron from Presque Isle Lt. 
to Sturgeon Point MI 5 NM off shore to US/Canadian border', '93346' : 'St Ignace to False Detour Channel', '93363' : 'Lake Huron from Sturgeon Point to Alabaster MI 5 NM off shore to US/Canadian border', '93349' : 'Sturgeon Pt to Alabaster MI', '93347' : '5NM East of Mackinac Bridge to Presque Isle Light MI including Bois Blanc Island', '93421' : 'Outer Saginaw Bay SW of Alabaster to Port Austin MI to Inner Saginaw Bay', '93441' : 'Port Austin to Harbor Beach MI', '93462' : 'Lake Huron from Port Austin to Harbor Beach 5 NM Off Shore to the US/Canadian border', '93442' : 'Harbor Beach to Port Sanilac MI', '93463' : 'Lake Huron from Harbor Beach to Port Sanilac 5 NM Off Shore to US/Canadian border', '93464' : 'Lake Huron from Port Sanilac to Port Huron 5 NM Off Shore to US/Canadian border', '94460' : 'Lake St. Clair Open Lake (U.S. Portion)', '94423' : 'Detroit River', '93443' : 'Port Sanilac to Port Huron MI', '96444' : 'Michigan Waters of Lake Erie from Detroit River to North Cape MI', '96142' : 'Maumee Bay to Reno Beach OH', '96162' : 'Detroit River Lt. to Maumee Bay OH to Reno Beach OH beyond 5 NM offshoreline to US-Canadian border', '96143' : 'Reno Beach to The Islands OH', '96144' : 'The Islands to Vermilion OH', '96163' : 'Reno Beach to The Islands OH beyond 5 NM off shoreline to US-Canadian border', '96145' : 'Vermilion to Avon Point OH', '96146' : 'Avon Point to Willowick OH', '96164' : 'The Islands to Vermilion OH beyond 5 nm off shoreline to US-Canadian border', '96165' : 'Vermilion to Avon Point OH beyond 5 nm off shoreline to US-Canadian border', '96166' : 'Avon Point to Willowick OH beyond 5 nm off shoreline to US-Canadian border', '96147' : 'Willowick to Geneva-on-the Lake OH', '96167' : 'Willowick to Geneva-on-the-Lake OH beyond 5 NM off shoreline to US-Canadian border', '96148' : 'Geneva-on-the-Lake to Conneaut OH', '96168' : 'Geneva-on-the-Lake to Conneaut OH beyond 5 nm off shoreline to US-Canadian border', '96149' : 'Conneaut OH to Ripley NY', '96169' : 'Conneaut OH to Ripley NY beyond 5 nm off shoreline to US-Canadian border', '96061' : 'Ripley to Buffalo NY extending from 5 NM off shoreline to US-Canadian border', '97042' : 'Niagara River to Hamlin Beach NY', '97062' : 'Niagara River to Hamlin Beach NY beyond 5 NM off shoreline to US-Canadian border', '97043' : 'Hamlin Beach to Sodus Bay NY', '97063' : 'Hamlin Beach to Sodus Bay NY beyond 5 NM off shoreline to US-Canadian border', '97044' : 'Sodus Bay to Mexico Bay NY', '97064' : 'Sodus Bay to Mexico Bay NY beyond 5 NM off shoreline to US-Canadian border', '97045' : 'Mexico Bay NY to the St. Lawrence River', '98022' : 'St. Lawrence River above Ogdensburg NY', '98024' : 'St. Lawrence River from Ogdensburg to St. Regis NY', '97065' : 'Mexico Bay NY to the St. 
Lawrence River beyond 5 NM off shoreline to US-Canadian border', '91162' : 'Lake Superior west of a line from Saxon Harbor WI to Grand Portage MN beyond 5NM', '91265' : 'Lake Superior West of Line from Manitou Island to Marquette MI Beyond 5 NM from shore', '93422' : 'Inner Saginaw Bay SW of Point Au Gres to Bay Port MI', '75450' : 'Coastal waters from Altamaha Sound to Fernandina Beach FL out 20 NM', '73052' : 'Intra Coastal Waters from Schoodic Point, ME to Stonington, ME', '73658' : 'Coastal waters from NC VA border to Currituck Beach Light NC out 20 nm', '77455' : 'Coastal waters from Lower Atchafalaya River to Intracoastal City LA out 20 NM', '73170' : 'Waters from Stonington ME to Merrimack River MA from 25 to 40 nm', '73271' : 'Ocean Waters from Provincetown to Nantucket from 20 to 35 NM offshore', '73070' : 'Waters from Eastport ME to Stonington (Deer Isle) ME from 25 to 40 nm', '73273' : 'Ocean Waters from Montauk NY to Marthas Vineyard from 25 to 40 NM offshore', '73272' : 'Ocean Waters from Marthas Vineyard to Nantucket from 25 to 45 NM offshore', '73370' : 'Waters from Montauk Point NY to Sandy Hook NJ from 20 to 40 nm', '73470' : 'Waters from Sandy Hook NJ to Fenwick Island DE from 20 to 40 nm', '73670' : 'Waters from Fenwick Island DE to Currituck Beach Light NC from 20 to 40 nm', '75370' : 'Waters from South Santee River SC to Savannah GA extending from 20 nm to 40 nm', '75470' : 'Waters from Altamaha Sound GA to Fernandina Beach FL from 20 to 60 NM', '75374' : 'Waters from Savannah GA to Altamaha Sound GA extending from 20 to 60 nm', '75472' : 'Waters from Fernandina Beach to St. Augustine FL from 20 to 60 NM', '75474' : 'Waters from St. Augustine to Flagler Beach FL from 20 to 60 NM', '57775' : 'Waters from San Mateo point to the Mexican Border Extending 30 to 60 nm out including San Clemente Island', '57310' : 'Coos Bay Bar', '77534' : 'Lake Borgne', '77557' : 'Coastal waters from Pascagoula Mississippi to Stake Island out 20 NM', '57330' : 'Chetco River Bar', '73656' : 'Coastal Waters from Cape Charles Light to Virginia-North Carolina border out to 20 nm', '73634' : 'Chesapeake Bay from Little Creek VA to Cape Henry VA including the Chesapeake Bay Bridge Tunnel', '73154' : 'Coastal Waters from Cape Elizabeth, ME to Merrimack River, MA out 25 NM', '57356' : 'Coastal waters from Cape Blanco OR to Pt. St. George CA out 10 nm', '94422' : 'St. 
Clair River', '73541' : 'Choptank River to Cambridge MD and the Little Choptank River', '96040' : 'Ripley to Dunkirk NY', '77532' : 'Mississippi Sound', '77536' : 'Chandeleur Sound', '77538' : 'Breton Sound', '73451' : 'Coastal waters from Manasquan Inlet to Little Egg Inlet NJ out 20 nm', '96020' : 'Upper Niagara River and Buffalo Harbor', '91263' : 'Lake Superior from Saxon Harbor WI to Upper Entrance to Portage Canal MI 5 NM off shore to the US/Canadian border including Isle Royale National Park', '91264' : 'Lake Superior from Upper Entrance to Portage Canal to Manitou Island MI 5 NM off shore to the US/Canadian Border', '92846' : 'Holland to Grand Haven MI', '92847' : 'Grand Haven to Whitehall MI', '92849' : 'Pentwater to Manistee MI', '92848' : 'Whitehall to Pentwater MI', '92874' : 'Lake Michigan from Holland to Grand Haven MI 5 NM offshore to Mid Lake', '92872' : 'Lake Michigan from Grand Haven to Whitehall MI 5 NM offshore to Mid Lake', '92341' : 'Seul Choix Point to 5 NM West of Mackinac Bridge', '93345' : 'Straits of Mackinac within 5 nm of Mackinac Bridge including Mackinac Island', '92345' : 'Point Betsie to Sleeping Bear Point MI', '92323' : 'Grand Traverse Bay south of a line Grand Traverse Light to Norwood MI', '93348' : 'Presque Isle Light to Sturgeon Pt MI Including Thunder Bay National Marine Sanctuary', '73543' : 'Tangier Sound and the inland waters surrounding Bloodsworth Island', '73330' : 'Long Island Sound East of New Haven CT/Port Jefferson NY', '57110' : 'Grays Harbor Bar', '96041' : 'Dunkirk to Buffalo NY', '75352' : 'Coastal waters from Edisto Beach SC to Savannah GA out 20 nm', '75354' : 'Coastal waters from Savannah GA to Altamaha Sound GA out 20 nm ...including Grays Reef National Marine Sanctuary', '77850' : 'Coastal waters from Tarpon Springs to Suwannee River FL out 20 NM', '77355' : 'Coastal waters from High Island to Freeport TX out 20 NM', '77350' : 'Coastal waters from Freeport to Matagorda Ship Channel TX out 20 NM', '77550' : 'Coastal Waters from Port Fourchon LA to Lower Atchafalaya River LA out 20 nm', '77770' : 'Waters from Apalachicola to Destin FL from 20 to 60 NM', '77775' : 'Waters from Suwannee River to Apalachicola FL from 20 to 60 NM', '77870' : 'Waters from Tarpon Springs to Suwannee River FL out 20 to 60 NM', '77575' : 'Coastal Waters from Stake Island LA to Southwest Pass of the Mississippi River from 20 to 60 nm', '77472' : 'Waters from Intracoastal City to Cameron LA from 20 to 60 NM', '77475' : 'Waters from Lower Atchafalaya River to Intracoastal City LA from 20 to 60 NM', '77570' : 'Coastal waters from Port Fourchon Louisiana to Lower Atchafalaya River LA from 20 to 60 NM', '77375' : 'Waters from High Island to Freeport TX from 20 to 60 NM', '77470' : 'Waters from Cameron LA to High Island TX from 20 to 60 NM', '77577' : 'Coastal waters from Pascagoula Mississippi to Stake Island Louisiana out 20 to 60 NM', '77572' : 'Coastal waters from Southwest Pass of the Mississippi River to Port Fourchon Louisiana from 20 to 60 NM', '77555' : 'Coastal Waters from Boothville LA to Southwest Pass of the Mississippi River out 20 nm', '77552' : 'Coastal waters from the Southwest Pass of the Mississippi River to Port Fourchon Louisiana out 20 NM', '65154' : 'Saipan Coastal Waters', '59121' : 'Alenuihaha Channel', '59117' : 'Maui County Windward Waters', '75256' : 'Coastal waters from Murrells Inlet to South Santee River SC out 20 nm', '97030' : 'Lower Niagara River', '77330' : 'Matagorda Bay', '73542' : 'Patuxent River to Broomes Island MD', '57153' : 
'Coastal Waters From James Island To Point Grenville Out 10 Nm', '73355' : 'Sandy Hook NJ to Fire Island Inlet NY out 20 nm', '73345' : 'South Shore Bays from Jones Inlet through Shinnecock Bay', '73636' : 'York River', '77235' : 'Bays and Waterways from Port Aransas to Port O\'Connor', '73635' : 'Rappahannock River from Urbanna to Windmill Point', '75670' : 'Waters from Jupiter Inlet to Deerfield Beach FL from 20 to 60 NM', '73638' : 'James River from James River Bridge to Hampton Roads Bridge-Tunnel', '57135' : 'Puget Sound and Hood Canal', '57133' : 'Northern Inland Waters Including The San Juan Islands', '77335' : 'Galveston Bay', '77655' : 'Coastal waters from Destin to Pensacola FL out 20 NM', '77650' : 'Coastal waters from Pensacola FL to Pascagoula MS out 20 NM', '77670' : 'Waters from Pensacola FL to Pascagoula MS from 20 to 60 NM', '77675' : 'Waters from Destin to Pensacola FL from 20 to 60 NM', '77631' : 'South Mobile Bay', '77632' : 'Mississippi Sound', '77634' : 'Pensacola Bay Area', '77635' : 'Choctawhatchee Bay', '77630' : 'North Mobile Bay', '57655' : 'Inner waters from Point Mugu to San Mateo Pt. CA including Santa Catalina and Anacapa Islands', '57650' : 'East Santa Barbara Channel from Pt. Conception to Pt. Mugu CA including Santa Cruz Island', '57676' : 'Outer waters from Santa Cruz Island to San Clemente Island to 60 NM offshore including San Nicolas and Santa Barbara Islands', '57673' : 'Waters from Pt. Sal to Santa Cruz Island CA and westward 60 nm including San Miguel and Santa Rosa Islands', '57645' : 'Point Piedras Blancas to Point Sal westward out to 10 NM', '77032' : 'Bayside and Gulf side from Craig Key to West End of Seven Mile Bridge', '75252' : 'Coastal waters from Cape Fear NC to Little River Inlet SC out 20 nm', '73637' : 'James River from Jamestown to the James River Bridge', '58245' : 'Flaxman Island to Demarcation Point', '58240' : 'Cape Halkett to Flaxman Island', '58235' : 'Point Franklin to Cape Halkett', '58230' : 'Cape Beaufort to Point Franklin', '58225' : 'Cape Thompson to Cape Beaufort', '58220' : 'Wales to Cape Thompson', '58215' : 'Kotzebue Sound', '58200' : 'Norton Sound', '58210' : 'Dall Point to Wales', '58180' : 'Kuskokwim Delta and Etolin Strait', '58170' : 'Cape Sarichef to Nikolski Bering Side', '58179' : 'Pribilof Islands Nearshore Waters', '58155' : 'Castle Cape to Cape Sarichef', '58150' : 'Sitkinak to Castle Cape', '58138' : 'Shelikof Strait', '58132' : 'Shuyak Island To Sitkinak', '58139' : 'Cook Inlet Kalgin Island to Point Bede', '58125' : 'Prince William Sound', '58128' : 'Valdez Arm', '58127' : 'Valdez Narrows', '58126' : 'Port of Valdez', '58052' : 'Icy Cape to Cape Suckling', '58051' : 'Cape Fairweather to Icy Cape', '58053' : 'Yakutat Bay', '58022' : 'Cross Sound', '58042' : 'Cape Decision to Cape Edgecumbe', '58041' : 'Dixon Entrance to Cape Decision', '58033' : 'Southern Chatham Strait', '58035' : 'Sumner Strait', '58034' : 'Frederick Sound', '58031' : 'Stephens Passage', '58032' : 'Northern Chatham Strait', '58021' : 'Icy Strait', '58011' : 'Glacier Bay', '58013' : 'Southern Lynn Canal', '58012' : 'Northern Lynn Canal', '58043' : 'Southeast Alaska Outside Waters From Cape Edgecumbe to Cape Fairweather', '58036' : 'Clarence Strait', '58129' : 'Western Prince William Sound', '58136' : 'Chiniak Bay', '58137' : 'Marmot Bay', '58119' : 'Cape Suckling to Cape Cleare', '58121' : 'Resurrection Bay', '58141' : 'Kachemak Bay', '58131' : 'Barren Islands East', '58171' : 'Unalaska Bay', '58140' : 'Cook Inlet North Kalgin Island', 
'58130' : 'West of Barren Islands Including Kamishak Bay', '58160' : 'Bristol Bay', '58181' : 'North and West of Nunivak Island', '58174' : 'Nikolski to Seguam Pacific Side', '58173' : 'Nikolski to Seguam Bering Side', '58176' : 'Seguam to Adak Pacific Side', '58175' : 'Seguam to Adak Bering Side', '58172' : 'Cape Sarichef to Nikolski Pacific Side', '58165' : 'Port Heiden to Cape Sarichef', '58120' : 'Cape Cleare to Gore Point', '58178' : 'Kiska to Attu', '58177' : 'Adak to Kiska', '58185' : 'St Matthew Island Waters', '77856' : 'Coastal waters from Bonita Beach to Englewood FL out 20 NM', '77836' : 'Charlotte Harbor and Pine Island Sound', '57575' : 'Waters from Pigeon Point to Point Pinos 10-60 NM', '57571' : 'Waters from Point Reyes to Pigeon Point 10-60 NM', '57531' : 'San Francisco Bay South of the Bay Bridge', '61152' : 'Coastal waters of Swain\'s Island', '75555' : 'Sebastian Inlet to Jupiter Inlet 0-20 nm', '75575' : 'Sebastian Inlet to Jupiter Inlet 20-60 nm', '75570' : 'Flagler Beach to Volusia-Brevard County Line 20-60 nm', '75550' : 'Flagler Beach to Volusia-Brevard County Line 0-20 nm', '75552' : 'Volusia-Brevard County Line to Sebastian Inlet 0-20 nm', '75572' : 'Volusia-Brevard County Line to Sebastian Inlet 20-60 nm', '77633' : 'Perdido Bay Area', '75152' : 'S of Oregon Inlet NC to Cape Hatteras NC out to 20 nm', '75156' : 'S of Ocracoke Inlet NC to Cape Lookout NC out to 20 nm', '75154' : 'S of Cape Hatteras NC to Ocracoke Inlet NC out to 20 nm', '75158' : 'S of Cape Lookout NC to Surf City NC out to 20 nm', '75170' : 'Waters from Currituck Beach Light to Surf City NC from 20 to 40 nm', '75150' : 'S of Currituck Beach Light NC to Oregon Inlet NC out to 20 nm', '75130' : 'Albemarle Sound', '75131' : 'Alligator River', '75136' : 'Pamlico and Pungo Rivers', '75137' : 'Neuse and Bay Rivers', '57670' : 'Point Piedras Blancas to Point Sal from 10 to 60 NM', '75270' : 'Waters from Surf City NC to South Santee River SC from 20 to 40 nm', '75135' : 'Pamlico Sound', '73270' : 'Ocean Waters from the Merrimack River to Plymouth from 40 to 60 NM offshore', '73234' : 'Buzzards Bay', '73233' : 'Vineyard Sound', '73236' : 'Narragansett Bay', '73237' : 'Block Island Sound', '75031' : 'Caribbean from 11N to 15N between 72W and 80W including Colombia Basin', '75013' : 'Caribbean N of 18N between 76W and 85W including Cayman Basin', '75037' : 'Tropical N Atlantic from 7N and 15N between 55W and 60W', '75121' : 'Atlantic from 22N to 27N between 65W and 70W', '75011' : 'Caribbean N of 18N W of 85W including Yucatan Basin', '75017' : 'Gulf of Honduras', '75019' : 'Caribbean from 15N to 18N between 80W and 85W', '75021' : 'Caribbean from 15N to 18N between 72W and 80W', '75015' : 'Caribbean approaches to the Windward Passage', '75039' : 'SW Caribbean S of 11N including the approaches to the Panama Canal', '75029' : 'W Central Caribbean from 11N to 15N W of 80W', '75023' : 'Caribbean N of 15N between 64W and 72W', '75027' : 'Tropical N Atlantic from 15N to 19N between 55W and 60W', '75113' : 'Atlantic from 27N to 31N between 70W and 77W', '75125' : 'Atlantic S of 22N between 65W and 70W including Puerto Rico Trench', '75123' : 'Atlantic S of 22N W of 70W including approaches to the Windward Passage', '75025' : 'Offshore Waters Leeward Islands', '75035' : 'Offshore Waters Windward Islands including Trinidad and Tobago', '75117' : 'Bahamas including Cay Sal Bank', '75033' : 'Caribbean S of 15N between 64W and 72W including Venezuela Basin', '75127' : 'Atlantic from 19N to 22N between 55W and 65W', 
'75119' : 'Atlantic from 22N to 27N E of Bahamas to 70W', '75115' : 'Atlantic from 27N to 31N between 65W and 70W', '75111' : 'Atlantic from 27N to 31N W of 77W', '73805' : 'Georges Bank between Cape Cod and 68W north of 1000 FM', '73810' : 'South of New England between the Great South Channel and Montauk Point to 1000 FM', '73820' : 'Hudson Canyon to Baltimore Canyon to 1000 FM', '73925' : 'Baltimore Canyon to Hatteras Canyon between 100 NM and 250 NM offshore', '57900' : 'Cape Flattery to Cape Shoalwater between 150 NM and 250 NM offshore', '57915' : 'Florence, OR to Point St. George between 150 NM and 250 NM offshore', '57920' : 'Point St. George to Point Arena between 150 NM and 250 NM offshore', '57930' : 'Pigeon Point to Point Piedras Blancas between 150 NM and 250 NM offshore', '57940' : 'Santa Cruz Island, CA to 120W between 150 NM and 250 NM offshore', '77019' : 'Central Gulf from 22N to 26N between 87W and 94W', '77015' : 'NE Gulf N of 25N E of 87W', '77013' : 'N Central Gulf including Flower Garden Banks Marine Sanctuary', '77011' : 'NW Gulf including Stetson Bank', '77025' : 'E Bay of Campeche including Campeche Bank', '77017' : 'W Central Gulf from 22N to 26N W of 94W', '77021' : 'E Gulf from 22N to 25N E of 87W including Straits of Florida', '77023' : 'SW Gulf S of 22N W of 94W', '58510' : 'Eastern US Arctic Offshore', '58505' : 'Central US Arctic Offshore', '58500' : 'Western US Arctic Offshore', '58310' : 'Gulf of Alaska North of 55 Degrees North and East of 144 W', '58411' : 'Bering Sea Offshore West of 180 and East of the International Date Line', '58351' : 'Gulf of Alaska Offshore North of 57N and West of 144W', '58352' : 'Gulf of Alaska Offshore South of 57N North of 55N and West of 144W', '58413' : 'Bering Sea Offshore 171W to 180 and South of 56N', '58414' : 'Bering Sea Offshore East of 171W', '58412' : 'Bering Sea Offshore 171W to 180 and North of 56N', '59180' : 'Offshore Waters Within 240 nm of Honolulu', '73800' : 'Gulf of Maine to the Hague Line', '73900' : 'Georges Bank between 68W and the Hague Line', '73815' : 'South of Long Island between Montauk Point and Sandy Hook to 1000 FM', '73905' : 'East of 69W to the Hague Line between 1000 FM and 39N', '73910' : 'East of 69W and south of 39N to 250 NM offshore', '73920' : 'Baltimore Canyon to 69W east of 1000 FM and south of 38.5N to 250 NM offshore', '73915' : 'Between 1000FM and 38.5 N west of 69 W', '73825' : 'Baltimore Canyon to Cape Charles Light to 100 NM offshore', '73828' : 'Cape Charles Light to Currituck Beach Light to 100 NM offshore', '73833' : 'Cape Hatteras to Cape Fear to 100 NM Offshore.', '73830' : 'Currituck Beach Light to Cape Hatteras to 100 NM offshore', '73930' : 'Hatteras Canyon to Cape Fear between 100 NM and 250 NM offshore', '73835' : 'Cape Fear to 31N to 1000 FM', '73935' : 'Cape Fear to 31N east of 1000 FM to 250 NM offshore', '57840' : 'Santa Cruz Island, CA to San Clemente Island, CA between 60 NM and 150 NM offshore', '57835' : 'Point Piedras Blancas to Santa Cruz Island, CA between 60 NM and 150 NM offshore', '57935' : 'Point Piedras Blancas to Santa Cruz Island, CA between 150 NM and 250 NM offshore', '57800' : 'Cape Flattery to Cape Shoalwater between 60 NM and 150 NM offshore', '57905' : 'Cape Shoalwater to Cape Lookout between 150 NM and 250 NM offshore', '57805' : 'Cape Shoalwater to Cape Lookout between 60 NM and 150 NM offshore', '57910' : 'Cape Lookout to Florence, OR between 150 NM and 250 NM offshore', '57810' : 'Cape Lookout to Florence, OR between 60 NM and 150 NM 
offshore', '57815' : 'Florence, OR to Point St. George between 60 NM and 150 NM offshore', '57820' : 'Point St. George to Point Arena between 60 NM and 150 NM offshore', '57925' : 'Point Arena to Pigeon Point between 150 NM and 250 NM offshore', '57825' : 'Point Arena to Pigeon Point between 60 NM and 150 NM offshore', '57830' : 'Pigeon Point to Point Piedras Blancas between 60 NM and 150 NM offshore', '57945' : 'San Clemente Island, CA to Guadalupe Island from 60 NM offshore west to 120W', 'XXXXX' : 'TEST', } CA_SAME_CODE={ 'LOCATION' : 'CA', #Nova Scotia '11100' : 'Kings County', '11200' : 'Annapolis County', '11300' : 'Digby County', '11400' : 'Lunenburg County', '11500' : 'Queens County', '11600' : 'Shelburne County', '11700' : 'Yarmouth County', '12100' : 'Cumberland County - Minas Shore', '12200' : 'Cumberland County North and Cobequid Pass', '12300' : 'Colchester County - Cobequid Bay', '12400' : 'Colchester County North', '12500' : 'Hants County', '12600' : 'Colchester County - Truro and south', '12700' : 'Halifax Metro and Halifax County West', '12800' : 'Halifax County - east of Porters Lake', '13100' : 'Pictou County', '13200' : 'Guysborough County', '13300' : 'Antigonish County', '14100' : 'Richmond County', '14200' : 'Inverness County - south of Mabou', '14300' : 'Inverness County - Mabou and north', '14400' : 'Victoria County', '14500' : 'Sydney Metro and Cape Breton County', #New Brunswick '15100' : 'Fredericton and southern York County', '15200' : 'Oromocto and Sunbury County', '15300' : 'Grand Lake and Queens County', '15400' : 'Sussex/Kennebecasis Valley and Kings County', '15500' : 'Saint John and County', '15600' : 'St. Stephen and northern Charlotte County', '15700' : 'Grand Manan and coastal Charlotte County', '16100' : 'Woodstock and Carleton County', '16200' : 'Stanley-Doaktown-Blackville Area', '16300' : 'Kent County', '16400' : 'Moncton and southeast New Brunswick', '16500' : 'Kouchibouguac National Park', '16600' : 'Fundy National Park', '17100' : 'Edmundston and Madawaska County', '17200' : 'Campbellton and Restigouche County', #sub-regions '17210' : 'Western half of Restigouche County', '17220' : 'Campbellton and eastern half of Restigouche County', '17300' : 'Grand Falls and Victoria County', '17400' : 'Mount Carleton-Renous Highway', '17500' : 'Bathurst and Chaleur Region', '17600' : 'Miramichi and Area', '17700' : 'Acadian Peninsula', #Prince Edward Island '18100' : 'Kings County', '18200' : 'Prince County', '18300' : 'Queens County', #Newfoundland and Labrador '21100' : 'Avalon Peninsula North', '21200' : 'Avalon Peninsula Southeast', '21300' : 'St. John\'s and vicinity', #sub-regions '21310' : 'North of La Manche', '21320' : 'La Manche and south', '21400' : 'Avalon Peninsula Southwest', '22100' : 'Burin Peninsula', '22200' : 'Connaigre', '22300' : 'Burgeo to Ramea', '22400' : 'Channel-Port aux Basques and vicinity', #sub-regions '22410' : 'South Coast', '22420' : 'West Coast', '23100' : 'Corner Brook and vicinity', '23200' : 'Deer Lake - Humber Valley', '23300' : 'Gros Morne', '23400' : 'Bay St. 
George', '24100' : 'Buchans and the Interior', '24200' : 'Grand Falls-Windsor and vicinity', '25100' : 'Bay of Exploits', '25200' : 'Bonavista North', '25300' : 'Bonavista Peninsula', '25400' : 'Clarenville and vicinity', '25500' : 'Gander and vicinity', '25600' : 'Green Bay - White Bay', '25700' : 'Terra Nova', '26100' : 'Northern Peninsula East', '26200' : 'Parson\'s Pond - Hawke\'s Bay', '26300' : 'Port Saunders and the Straits', '27100' : 'Red Bay to L\'Anse-au-Clair', '27200' : 'Norman Bay to Lodge Bay', '27300' : 'Cartwright to Black Tickle', '27400' : 'Rigolet and vicinity', '27500' : 'Postville - Makkovik', '27600' : 'Hopedale and vicinity', '27700' : 'Nain and vicinity', '28100' : 'Upper Lake Melville', '28200' : 'Eagle River', '28300' : 'Churchill Valley', '29100' : 'Churchill Falls and vicinity', '29200' : 'Labrador City and Wabush', #Quebec '31100' : 'Gatineau', '31200' : 'Waskaganish', '31300' : 'Matagami', #sub-regions '31310' : 'Joutel - Matagami area', '31320' : 'Miquelon - Desmaraisville area', '31400' : 'Abitibi', #sub-regions '31410' : 'La Sarre area', '31420' : 'Amos area', '31430' : 'Lebel-sur-Quevillon area', '31440' : 'Senneterre area', '31450' : 'Rouyn area', '31460' : 'Malartic area', '31470' : 'Val d\'Or - Louvicourt area', '31500' : 'Temiscamingue', #sub-regions '31510' : 'Ville-Marie area', '31520' : 'Temiscaming area', '31600' : 'Reserve Faunique La Verendrye', #sub-regions '31610' : 'Dorval Lodge area', '31620' : 'Le Domaine area', '31700' : 'Pontiac', #sub-regions '31710' : 'Rapides-des-Joachims area', '31720' : 'Fort William - Shawville area', '31800' : 'Mont-Laurier', #sub-regions '31810' : 'Mont-Laurier area', '31820' : 'La Minerve - Rouge River area', '31900' : 'Upper Gatineau-Lievre-Papineau', #sub-regions '31910' : 'Low - Wakefield area', '31920' : 'Maniwaki - Gracefield area', '31930' : 'Papineauville - Cheneville area', '31940' : 'Papineau-Labelle Reserve area', '31950' : 'Lievre River area', '32100' : 'Parc du Mont-Tremblant - Saint-Michel-des-Saints', #sub-regions '32110' : 'Saint-Michel-des-Saints area', '32120' : 'Saint-Donat - Parc du Mont-Tremblant area', '32200' : 'Laurentides', #sub-regions '32210' : 'Mont-Tremblant - Sainte-Agathe area', '32220' : 'Sainte-Adele - Saint-Sauveur area', '32300' : 'Lachute-Saint-Jerome', #sub-regions '32310' : 'Saint-Jerome area', '32320' : 'Lachute area', '32330' : 'Saint-Eustache area', '32400' : 'Montreal Metropolitain - Laval', #sub-regions '32410' : 'Longueuil - Varennes area', '32420' : 'Laval area', '32430' : 'Montreal Island area', '32440' : 'Chateauguay - La Prairie area', '32500' : 'Vaudreuil - Soulanges - Huntingdon', #sub-regions '32510' : 'Soulanges area', '32520' : 'Vaudreuil area', '32530' : 'Saint-Remi area', '32540' : 'Valleyfield - Beauharnois area', '32550' : 'Hemmingford area', '32560' : 'Huntingdon area', '32600' : 'Lanaudiere', #sub-regions '32610' : 'Berthierville - Saint-Gabriel area', '32620' : 'Rawdon - Joliette area', '32630' : 'Mascouche area', '32700' : 'Mauricie', #sub-regions '32710' : 'Louiseville area', '32720' : 'Matawin - Mauricie National Park area', '32730' : 'Lac-aux-Sables area', '32740' : 'Shawinigan area', '32750' : 'Sainte-Anne-de-la-Perade area', '32760' : 'Trois-Rivieres area', '32800' : 'Quebec', #sub-regions '32811' : 'Portneuf area', '32812' : 'Valcartier - Stoneham area', '32813' : 'Quebec area', '32814' : 'Cote-de-Beaupre - L\'Ile d\'Orleans area', '32821' : 'Bellechasse area', '32822' : 'Levis area', '32823' : 'Saint-Lambert area', '32824' : 'Lotbiniere area', 
'33100' : 'James-Bay and La-Grande-Riviere', '33200' : 'LG-Quatre - Laforge and Fontanges', '33300' : 'Schefferville', '33400' : 'Fermont', '34100' : 'Chibougamau', '34200' : 'Parent-Reservoir Gouin', #sub-regions '34210' : 'Gouin Reservoir area', '34220' : 'Parent area', '34300' : 'Lac-Saint-Jean', #sub-regions '34310' : 'Ashuapmushuan Wildlife Reserve area', '34320' : 'Normandin - Peribonka area', '34330' : 'Alma - Desbiens area', '34340' : 'Saint-Felicien - Roberval area', '34400' : 'La Tuque', #sub-regions '34410' : 'Lake Bouchette area', '34420' : 'La Tuque area', '34500' : 'Riviere Manicouagan', #sub-regions '34510' : 'Manic-5 area', '34520' : 'Manic-3 area', '34530' : 'Chutes-des-Passes - Pipmuacan Reservoir area', '34540' : 'Labrieville area', '34600' : 'Les Escoumins-Forestville', #sub-regions '34610' : 'Forestville area', '34620' : 'Les Escoumins area', '34700' : 'Saguenay', #sub-regions '34710' : 'Falardeau - Mont-Valin area', '34720' : 'Sainte-Rose-du-Nord area', '34730' : 'Ville de Saguenay area', '34740' : 'Riviere-eternite - Petit-Saguenay area', '34750' : 'Tadoussac - Baie-Sainte-Catherine area', '34800' : 'Charlevoix', #sub-regions '34810' : 'La Malbaie area', '34820' : 'Baie-Saint-Paul area', '34900' : 'Reserve Faunique des Laurentides', #sub-regions '34910' : 'Apica Mountain area', '34920' : 'Grands-Jardins Park area', '34930' : 'l\'etape area', '35100' : 'Rimouski - Mont-Joli', #sub-regions '35110' : 'Mont-Joli area', '35120' : 'Le Bic - Rimouski area', '35130' : 'Rimouski Wildlife Reserve area', '35200' : 'Matane', '35300' : 'Amqui-Vallee de la Matapedia', #sub-regions '35310' : 'Amqui area', '35320' : 'Matapedia area', '35400' : 'Sainte-Anne-des-Monts - Grande-Vallee', #sub-regions '35410' : 'Grande-Vallee area', '35420' : 'Sainte-Anne-des-Monts area', '35500' : 'Parc National de la Gaspesie - Murdochville', #sub-regions '35510' : 'Murdochville area', '35520' : 'Mont-Albert area', '35530' : 'Grande-Riviere - Cascapedia area', '35600' : 'Restigouche-Bonaventure', #sub-regions '35610' : 'New-Richmond - Bonaventure area', '35620' : 'Restigouche - Carleton area', '35700' : 'New-Carlisle - Chandler', #sub-regions '35710' : 'Chandler area', '35720' : 'New Carlisle - Port-Daniel area', '35800' : 'Parc National Forillon - Gaspe - Perce', #sub-regions '35810' : 'Parc National Forillon - Gaspe area', '35820' : 'Perce area', '36100' : 'Baie-Comeau', '36200' : 'Sept-Iles - Port-Cartier', #sub-regions '36210' : 'Baie-Trinite area', '36220' : 'Sept-Iles - Port-Cartier area', '36300' : 'Minganie', '36400' : 'Natashquan', '36500' : 'Chevery', '36600' : 'Blanc-Sablon', '36700' : 'Anticosti', '36800' : 'Iles-de-la-Madeleine', '37100' : 'Vallee du Richelieu-Saint-Hyacinthe', #sub-regions '37110' : 'Sorel - Yamaska area', '37120' : 'Saint-Hyacinthe - Acton Vale area', '37130' : 'Vercheres - Beloeil area', '37140' : 'Saint-Jean - Marieville area', '37150' : 'Lacolle area', '37200' : 'Drummondville - Bois-Francs', #sub-regions '37210' : 'Becancour - Villeroy area', '37220' : 'Victoriaville area', '37230' : 'Nicolet area', '37240' : 'Drummondville area', '37300' : 'Eastern Townships', #sub-regions '37311' : 'Granby - Waterloo area', '37312' : 'Brome-Missisquoi area', '37321' : 'Richmond area', '37322' : 'Weedon area', '37323' : 'Thetford Mines area', '37331' : 'Mont-Orford - Lake Memphremagog area', '37332' : 'Sherbrooke area', '37333' : 'Coaticook area', '37334' : 'Lac-Megantic area', '37335' : 'Cookshire area', '37400' : 'Beauce', #sub-regions '37410' : 'Lac-Etchemin area', '37420' : 
'Sainte-Marie-de-Beauce area', '37430' : 'Saint-Georges-de-Beauce area', '37500' : 'Montmagny - L\'Islet', #sub-regions '37510' : 'Montmagny - Saint-Jean-Port-Joli area', '37520' : 'Saint-Fabien - Saint-Pamphile area', '37600' : 'Kamouraska - Riviere-du-Loup - Trois-Pistoles', #sub-regions '37610' : 'Trois-Pistoles area', '37620' : 'Pohenegamook area', '37630' : 'Kamouraska area', '37640' : 'Riviere-du-Loup area', '37700' : 'Temiscouata', '38100' : 'Ivujivik', '38200' : 'Akulivik', '38300' : 'Puvirnituq', '38400' : 'Inukjuak', '38500' : 'Umiujaq', '38600' : 'Kuujjuarapik', '39100' : 'Salluit - Raglan Lake', '39200' : 'Kangiqsujuaq', '39300' : 'Quaqtaq', '39400' : 'Kangirsuk', '39500' : 'Aupaluk', '39600' : 'Tasiujaq', '39700' : 'Kuujjuaq', '39800' : 'Kangiqsualujjua', #Ontario '41100' : 'Simcoe - Delhi - Norfolk', '41200' : 'Dunnville - Caledonia - Haldimand', '41300' : 'Sarnia - Lambton', #sub-regions '41310' : 'Watford - Pinery Park - Eastern Lambton County', '41320' : 'Sarnia - Petrolia - Western Lambton County', '41400' : 'Windsor - Essex - Chatham-Kent', #sub-regions '41410' : 'Chatham-Kent - Rondeau Park', '41420' : 'Windsor - Leamington - Essex County', '41500' : 'London - Middlesex', #sub-regions '41510' : 'London - Parkhill - Eastern Middlesex County', '41520' : 'Strathroy - Komoka - Western Middlesex County', '41600' : 'Elgin', #sub-regions '41610' : 'St. Thomas - Aylmer - Eastern Elgin County', '41620' : 'Rodney - Shedden - Western Elgin County', '41700' : 'Oxford - Brant', #sub-regions '41710' : 'Woodstock - Tillsonburg - Oxford County', '41720' : 'Brantford - County of Brant', '42100' : 'Attawapiskat', '42200' : 'Fort Severn', '42300' : 'Peawanuck', '42400' : 'Big Trout Lake - Sachigo Lake', #sub-regions '42410' : 'Sachigo Lake - Bearskin Lake', '42420' : 'Big Trout Lake - Kasabonika', '42500' : 'Sandy Lake - Pikangikum', #sub-regions '42510' : 'Sandy Lake - Weagamow Lake - Deer Lake', '42520' : 'Pikangikum - Poplar Hill - MacDowell', '42600' : 'Pickle Lake - Wunnummin Lake', #sub-regions '42610' : 'Summer Beaver - Wunnummin Lake - Kingfisher Lake', '42620' : 'Pickle Lake - Cat Lake', '42700' : 'Fort Hope - Webequie', #sub-regions '42710' : 'Webequie', '42720' : 'Fort Hope - Lansdowne House - Ogoki', '43100' : 'City of Toronto', '43200' : 'York - Durham', #sub-regions '43210' : 'Uxbridge - Beaverton - Northern Durham Region', '43220' : 'Newmarket - Georgina - Northern York Region', '43230' : 'Pickering - Oshawa - Southern Durham Region', '43240' : 'Vaughan - Richmond Hill - Markham', '43300' : 'Belleville - Quinte - Northumberland', #sub-regions '43310' : 'Belleville - Quinte West - Eastern Northumberland County', '43320' : 'Cobourg - Colborne - Western Northumberland County', '43400' : 'Stirling - Tweed - South Frontenac', #sub-regions '43410' : 'Tamworth - Sydenham - South Frontenac', '43420' : 'Stirling - Tweed - Madoc', '43500' : 'Kingston - Prince Edward', #sub-regions '43510' : 'Kingston - Odessa - Frontenac Islands', '43520' : 'Napanee - Consecon', '43530' : 'Picton - Sandbanks Park', '43600' : 'Brockville - Leeds and Grenville', #sub-regions '43610' : 'Merrickville-Wolford - Kemptville', '43620' : 'Westport - Charleston Lake', '43630' : 'Brockville - Prescott', '43640' : 'Gananoque - Mallorytown', '43700' : 'Cornwall - Morrisburg', #sub-regions '43710' : 'Maxville - Alexandria', '43720' : 'Cornwall - Lancaster', '43730' : 'Winchester - Newington', '43740' : 'Morrisburg - Long Sault', '44100' : 'Barrie - Orillia - Midland', #sub-regions '44110' : 'Midland - Coldwater - 
Orr Lake', '44120' : 'Orillia - Lagoon City - Washago', '44130' : 'Barrie - Collingwood - Hillsdale', '44200' : 'Burk\'s Falls - Bayfield Inlet', #sub-regions '44210' : 'South River - Burk\'s Falls', '44220' : 'Bayfield Inlet - Dunchurch', '44300' : 'Algonquin', #sub-regions '44310' : 'Deep River - Whitney - Eastern Algonquin Park', '44320' : 'Western Algonquin Park - Lake of Two Rivers', '44400' : 'Parry Sound - Muskoka', #sub-regions '44410' : 'Huntsville - Baysville', '44420' : 'Town of Parry Sound - Rosseau - Killbear Park', '44430' : 'Port Carling - Port Severn', '44440' : 'Bracebridge - Gravenhurst', '44500' : 'Haliburton', #sub-regions '44510' : 'Oxtongue Lake - Fort Irwin - Northern Haliburton County', '44520' : 'Haliburton - Minden - Southern Haliburton County', '44600' : 'Bancroft - Bon Echo Park', #sub-regions '44610' : 'Bancroft - Hastings Highlands - Denbigh', '44620' : 'Kaladar - Bannockburn - Bon Echo Park', '44700' : 'Peterborough - Kawartha Lakes', #sub-regions '44710' : 'Fenelon Falls - Balsam Lake Park - Northern Kawartha Lakes', '44720' : 'Apsley - Woodview - Northern Peterborough County', '44730' : 'Lindsay - Southern Kawartha Lakes', '44740' : 'Peterborough City - Lakefield - Southern Peterborough County', '45100' : 'Prescott and Russell', '45200' : 'Renfrew - Pembroke - Barry\'s Bay', #sub-regions '45210' : 'Petawawa - Pembroke - Cobden', '45220' : 'Barry\'s Bay - Killaloe', '45230' : 'Renfrew - Arnprior - Calabogie', '45300' : 'Smiths Falls - Lanark - Sharbot Lake', #sub-regions '45310' : 'Smiths Falls - Perth - Eastern Lanark County', '45320' : 'Plevna - Sharbot Lake - Western Lanark County', '45400' : 'City of Ottawa', #sub-regions '45410' : 'Ottawa North - Kanata - Orleans', '45420' : 'Ottawa South - Richmond - Metcalfe', '46100' : 'City of Hamilton', '46200' : 'Grey - Bruce', #sub-regions '46210' : 'Bruce Peninsula - Sauble Beach - Tobermory', '46220' : 'Owen Sound - Blue Mountains - Northern Grey County', '46230' : 'Saugeen Shores - Kincardine - Southern Bruce County', '46240' : 'Hanover - Dundalk - Southern Grey County', '46300' : 'Huron - Perth', #sub-regions '46310' : 'Wingham - Blyth - Northern Huron County', '46320' : 'Listowel - Milverton - Northern Perth County', '46330' : 'Goderich - Bluewater - Southern Huron County', '46340' : 'Stratford - Mitchell - Southern Perth County', '46400' : 'Waterloo - Wellington', #sub-regions '46410' : 'Mount Forest - Arthur - Northern Wellington County', '46420' : 'Guelph - Erin - Southern Wellington County', '46430' : 'Kitchener - Cambridge - Region of Waterloo', '46500' : 'Dufferin - Innisfil', #sub-regions '46510' : 'Innisfil - New Tecumseth - Angus', '46520' : 'Shelburne - Mansfield - Northern Dufferin County', '46530' : 'Orangeville - Grand Valley - Southern Dufferin County', '46600' : 'Halton - Peel', #sub-regions '46610' : 'Caledon', '46620' : 'Mississauga - Brampton', '46630' : 'Halton Hills - Milton', '46640' : 'Burlington - Oakville', '46700' : 'Niagara', #sub-regions '46710' : 'St. 
Catharines - Grimsby - Northern Niagara Region', '46720' : 'Niagara Falls - Welland - Southern Niagara Region', '47100' : 'Red Lake - Ear Falls', #sub-regions '47110' : 'Red Lake - Woodland Caribou Park', '47120' : 'Ear Falls - Perrault Falls - Western Lac Seul', '47200' : 'Sioux Lookout - Savant Lake', #sub-regions '47210' : 'Savant Lake - Sturgeon Lake', '47220' : 'Sioux Lookout - Eastern Lac Seul', '47300' : 'Kenora - Nestor Falls', #sub-regions '47310' : 'Kenora - Grassy Narrows - Whitedog', '47320' : 'Sioux Narrows - Nestor Falls - Morson', '47400' : 'Dryden - Ignace', #sub-regions '47410' : 'Dryden - Vermilion Bay', '47420' : 'Ignace - English River', '47500' : 'Fort Frances - Rainy Lake', #sub-regions '47510' : 'Fort Frances - Emo - Rainy River', '47520' : 'Seine River Village - Mine Centre', '48100' : 'City of Thunder Bay', '48200' : 'Lake Nipigon - Wabakimi', #sub-regions '48210' : 'Armstrong - Auden - Wabakimi Park', '48220' : 'Beardmore - Jellicoe - Macdiarmid', '48230' : 'Gull Bay - Black Sturgeon Lake', '48300' : 'Geraldton - Manitouwadge - Hornepayne', #sub-regions '48310' : 'Nakina - Aroland - Pagwa', '48320' : 'Geraldton - Longlac - Caramat', '48330' : 'Manitouwadge - Hornepayne', '48400' : 'Atikokan - Upsala - Quetico', #sub-regions '48410' : 'Upsala - Raith', '48420' : 'Atikokan - Shebandowan - Quetico Park', '48500' : 'Superior West', #sub-regions '48510' : 'Cloud Bay - Dorion', '48520' : 'Kakabeka Falls - Whitefish Lake - Arrow Lake', '48600' : 'Nipigon - Marathon - Superior North', #sub-regions '48610' : 'Nipigon - Rossport', '48620' : 'Marathon - Schreiber', '48700' : 'Wawa - White River - Pukaskwa', #sub-regions '48710' : 'White River - Dubreuilville', '48720' : 'Wawa - Pukaskwa Park', '48800' : 'Sault Ste. Marie - Superior East', #sub-regions '48810' : 'Agawa - Lake Superior Park', '48820' : 'Searchmont - Montreal River Harbour - Batchawana Bay', '48830' : 'Sault Ste. Marie - St. Joseph Island', '49100' : 'Greater Sudbury and vicinity', '49200' : 'Elliot Lake - Ranger Lake', '49300' : 'Moosonee - Fort Albany', #sub-regions '49310' : 'Fort Albany', '49320' : 'Moosonee', '49400' : 'Kapuskasing - Hearst', #sub-regions '49410' : 'Fraserdale - Pledger Lake', '49420' : 'Kapuskasing - Hearst - Smooth Rock Falls', '49500' : 'Timmins - Cochrane', #sub-regions '49510' : 'Little Abitibi - Kesagami Lake', '49520' : 'Timmins - Cochrane - Iroquois Falls', '49600' : 'Chapleau - Gogama', #sub-regions '49610' : 'Chapleau - Missinaibi Lake', '49620' : 'Gogama - Foleyet', '49700' : 'Kirkland Lake - New Liskeard - Temagami', #sub-regions '49710' : 'Kirkland Lake - Englehart', '49720' : 'New Liskeard - Temagami', '49800' : 'North Bay - West Nipissing', #sub-regions '49810' : 'West Nipissing - French River', '49820' : 'North Bay - Powassan - Mattawa', '49900' : 'Manitoulin - Northshore - Killarney', #sub-regions '49910' : 'Blind River - Thessalon', '49920' : 'Espanola - Killarney', '49930' : 'Manitoulin Island', #Manitoba '51100' : 'Virden - Souris', #sub-regions '51110' : 'R.M. of Wallace', '51120' : 'R.M. of Woodworth', '51130' : 'R.M. of Daly', '51140' : 'R.M. of Whitehead', '51150' : 'R.M. of Sifton', '51160' : 'R.M. of Pipestone', '51170' : 'R.M. of Glenwood', '51200' : 'Brandon - Carberry - Treherne', #sub-regions '51211' : 'R.M. of Elton', '51212' : 'R.M. of Cornwallis', '51213' : 'R.M. of Oakland', '51220' : 'City of Brandon', '51231' : 'R.M. of North Cypress', '51232' : 'R.M. of South Cypress', '51241' : 'R.M. of North Norfolk', '51242' : 'R.M. 
of South Norfolk', '51243' : 'R.M. of Victoria', '51300' : 'Melita - Boissevain - Turtle Mountain Provincial Park', #sub-regions '51311' : 'R.M. of Albert', '51312' : 'R.M. of Cameron', '51313' : 'R.M. of Whitewater', '51321' : 'R.M. of Edward', '51322' : 'R.M. of Arthur', '51331' : 'R.M. of Brenda', '51332' : 'R.M. of Winchester', '51340' : 'R.M. of Morton', '51400' : 'Killarney - Pilot Mound - Manitou', #sub-regions '51411' : 'R.M. of Riverside', '51412' : 'R.M. of Turtle Mountain', '51421' : 'R.M. of Strathcona', '51422' : 'R.M. of Argyle', '51431' : 'R.M. of Roblin', '51432' : 'R.M. of Louise', '51441' : 'R.M. of Lorne', '51442' : 'R.M. of Pembina', '52100' : 'City of Winnipeg', '52200' : 'Selkirk - Gimli - Stonewall - Woodlands - Eriksdale', #sub-regions '52211' : 'R.M. of Eriksdale', '52212' : 'R.M. of Coldwell', '52221' : 'R.M. of Armstrong (north)', '52222' : 'R.M. of Armstrong (south)', '52223' : 'R.M. of Gimli', '52231' : 'R.M. of St. Laurent', '52232' : 'R.M. of Woodlands', '52241' : 'R.M. of Rockwood (Teulon)', '52242' : 'R.M. of St. Andrews (Dunnottar)', '52243' : 'R.M. of Rockwood (Stonewall)', '52244' : 'R.M. of St. Andrews (St. Andrews)', '52251' : 'R.M. of Rosser', '52252' : 'R.M. of West St. Paul', '52300' : 'Portage la Prairie - Headingley - Brunkild - Carman', #sub-regions '52311' : 'R.M. of Portage la Prairie (St. Ambroise)', '52312' : 'R.M. of Portage la Prairie (Portage la Prairie)', '52321' : 'R.M. of St Francois Xavier', '52322' : 'R.M. of Cartier', '52323' : 'R.M. of Headingley', '52331' : 'R.M. of Grey', '52332' : 'R.M. of Dufferin', '52340' : 'R.M. of MacDonald', '52400' : 'Dugald - Beausejour - Grand Beach', #sub-regions '52411' : 'R.M. of Alexander', '52412' : 'R.M. of St. Clements (Grand Beach and Brokenhead First Nation)', '52421' : 'R.M. of St. Clements (Libau)', '52422' : 'R.M. of Brokenhead', '52431' : 'R.M. of Springfield', '52432' : 'R.M. of Victoria Beach', '52434' : 'R.M. of East St. Paul', '52500' : 'Morden - Winkler - Altona - Emerson - Morris', #sub-regions '52510' : 'R.M. of Morris', '52520' : 'R.M. of Thompson', '52530' : 'R.M. of Roland', '52540' : 'R.M. of Montcalm', '52550' : 'R.M. of Rhineland', '52560' : 'R.M. of Stanley', '52600' : 'Steinbach - St. Adolphe - Dominion City - Vita - Richer', #sub-regions '52611' : 'R.M. of Ritchot', '52612' : 'R.M. of Tache', '52620' : 'R.M. of De Salaberry', '52630' : 'R.M. of Hanover', '52641' : 'R.M. of Ste. Anne', '52642' : 'R.M. of La Broquerie', '52651' : 'R.M. of Franklin', '52652' : 'R.M. of Stuartburn', '53100' : 'Bissett - Nopiming Provincial Park - Pine Falls', #sub-regions '53110' : 'Manigotagan, Black River First Nation and Hollow Water First Nation', '53120' : 'Bissett', '53130' : 'Nopiming Provincial Park', '53140' : 'R.M. of Alexander (Pine Falls and Great Falls)', '53150' : 'R.M. of Alexander west of Great Falls', '53200' : 'Whiteshell - Lac Du Bonnet - Pinawa', #sub-regions '53211' : 'R.M. of Lac Du Bonnet', '53212' : 'R.M. of Reynolds', '53221' : 'L.G.D. of Pinawa', '53222' : 'R.M. of Whitemouth', '53231' : 'Point du Bois', '53232' : 'Falcon Lake and West Hawk Lake', '53241' : 'R.M. of Reynolds north of Highway 1', '53242' : 'R.M. of Reynolds south of Highway 1', '53243' : 'Shoal Lake First Nation', '53300' : 'Sprague - Northwest Angle Provincial Forest', #sub-regions '53310' : 'Buffalo Point First Nation and Northwest Angle Provincial Forest', '53320' : 'R.M. of Piney (west)', '53330' : 'R.M. 
of Piney (east)', '54100' : 'Grand Rapids', #sub-regions '54110' : 'Grand Rapids and Easterville', '54120' : 'Waterhen, Meadow Portage and Skownan', '54200' : 'Arborg - Hecla - Fisher River - Gypsumville - Ashern', #sub-regions '54211' : 'R.M. of Grahamdale (north)', '54212' : 'R.M. of Grahamdale (central)', '54221' : 'Jackhead First Nation', '54222' : 'R.M. of Fisher (north)', '54231' : 'R.M. of Siglunes', '54232' : 'R.M. of Grahamdale (south)', '54241' : 'R.M. of Fisher (south)', '54242' : 'R.M. of Bifrost', '54243' : 'Moose Creek Provincial Forest, Pine Dock and Matheson Island', '55100' : 'Dauphin - Roblin - Winnipegosis', #sub-regions '55111' : 'R.M. of Park (north)', '55112' : 'R.M. of Shell River', '55113' : 'R.M. of Hillsburg', '55114' : 'R.M. of Shellmouth', '55115' : 'R.M. of Boulton', '55121' : 'R.M. of Dauphin', '55122' : 'R.M. of Ethelbert', '55123' : 'R.M. of Gilbert Plains', '55131' : 'R.M. of Mossey River', '55132' : 'R.M. of Grandview', '55133' : 'R.M. of Ochre River', '55200' : 'Minnedosa - Neepawa - Russell - Riding Mountain National Park', #sub-regions '55211' : 'R.M. of Russell', '55212' : 'R.M. of Silver Creek', '55221' : 'R.M. of Rossburn', '55222' : 'Riding Mountain National Park', '55223' : 'R.M. of Park (south)', '55231' : 'R.M. of Ellice', '55232' : 'R.M. of Birtle', '55233' : 'R.M. of Archie', '55234' : 'R.M. of Miniota', '55241' : 'R.M. of Shoal Lake', '55242' : 'R.M. of Strathclair', '55243' : 'R.M. of Hamiota', '55244' : 'R.M. of Blanshard', '55251' : 'R.M. of Harrison', '55252' : 'R.M. of Saskatchewan', '55253' : 'R.M. of Clanwilliam', '55254' : 'R.M. of Minto', '55255' : 'R.M. of Odanah', '55261' : 'R.M. of Langford', '55262' : 'R.M. of Rosedale', '55300' : 'Ste. Rose - McCreary - Alonsa - Gladstone', #sub-regions '55311' : 'R.M. of Lawrence', '55312' : 'R.M. of Alonsa (north)', '55321' : 'R.M. of Ste. Rose', '55322' : 'R.M. of Alonsa (central)', '55331' : 'R.M. of McCreary', '55332' : 'R.M. of Alonsa (south)', '55340' : 'R.M. of Glenella', '55351' : 'R.M. of Lansdowne', '55352' : 'R.M. of Westbourne', '55353' : 'R.M. of Lakeview', '56100' : 'The Pas - Wanless - Westray - Clearwater Lake Provincial Park', #sub-regions '56110' : 'The Pas, Cormorant, Westray and Wanless', '56120' : 'North and South Moose Lakes', '56200' : 'Swan River - Duck Mountain Provincial Park - Porcupine Provincial Forest', #sub-regions '56211' : 'Westgate, Red Deer Lake and Barrows', '56212' : 'Porcupine Provincial Forest', '56221' : 'R.M. of Mountain (northwest)', '56222' : 'Pelican Rapids and Shoal River First Nations', '56231' : 'R.M. of Swan River', '56232' : 'R.M. of Minitonas', '56241' : 'Duck Mountain Provincial Park and Forest', '56242' : 'R.M. 
of Mountain (southeast)', '57100' : 'Norway House - Cross Lake - Wabowden', #sub-regions '57110' : 'Molson Lake and Bear Lake', '57120' : 'Cross Lake and Jenpeg', '57130' : 'Wabowden', '57140' : 'Highway 6 between Little Limestone Lake and Ponton', '57150' : 'Norway House', '57200' : 'Poplar River', '57300' : 'Berens River - Little Grand Rapids - Bloodvein - Atikaki', #sub-regions '57310' : 'Berens River', '57320' : 'Little Grand Rapids and Atikaki Provincial Park', '57330' : 'Bloodvein', '58100' : 'Brochet', '58200' : 'Tadoule Lake', '58300' : 'Lynn Lake - Leaf Rapids - Pukatawagan', #sub-regions '58310' : 'Lynn Lake', '58320' : 'Leaf Rapids', '58330' : 'Pukatawagan', '58400' : 'Thompson - Nelson House - Split Lake', #sub-regions '58410' : 'South Indian Lake and Amisk Provincial Natural Park', '58420' : 'Split Lake and York Landing', '58430' : 'Thompson, Thicket Portage and Pikwitonei', '58440' : 'Nelson House', '58500' : 'Flin Flon - Cranberry Portage - Snow Lake', #sub-regions '58510' : 'Snow Lake and Herb Lake Landing', '58520' : 'Flin Flon, Cranberry Portage and Grass River Provincial Park', '59100' : 'Churchill', '59200' : 'York', '59300' : 'Gillam', '59400' : 'Shamattawa', '59500' : 'Island Lake - Oxford House - Gods Lake', #sub-regions '59510' : 'Oxford House and Gods Lake', '59520' : 'Red Sucker Lake', '59530' : 'Island Lake', #Saskatchewan '61100' : 'Leader - Gull Lake', #sub-regions '61111' : 'R.M. of Deer Forks', '61112' : 'R.M. of Happyland', '61113' : 'R.M. of Enterprise', '61114' : 'R.M. of Fox Valley', '61115' : 'R.M. of Big Stick', '61121' : 'R.M. of Clinworth', '61122' : 'R.M. of Miry Creek', '61123' : 'R.M. of Pittville', '61131' : 'R.M. of Piapot', '61132' : 'R.M. of Gull Lake', '61133' : 'R.M. of Carmichael', '61200' : 'Swift Current - Herbert - Cabri - Kyle - Lucky Lake', #sub-regions '61211' : 'R.M. of Lacadena', '61212' : 'R.M. of Victory', '61213' : 'R.M. of Canaan', '61221' : 'R.M. of Riverside', '61222' : 'R.M. of Saskatchewan Landing', '61223' : 'R.M. of Webb', '61224' : 'R.M. of Swift Current', '61225' : 'R.M. of Lac Pelletier', '61231' : 'R.M. of Excelsior', '61232' : 'R.M. of Morse', '61233' : 'R.M. of Coulee', '61234' : 'R.M. of Lawtonia', '61235' : 'R.M. of Glen Bain', '61236' : 'R.M. of Whiska Creek', '61300' : 'Shaunavon - Maple Creek - Val Marie - Cypress Hills', #sub-regions '61311' : 'R.M. of Maple Creek', '61312' : 'Cypress Hills Provincial Park', '61313' : 'R.M. of Reno', '61321' : 'R.M. of Arlington', '61322' : 'R.M. of White Valley', '61323' : 'R.M. of Frontier', '61331' : 'R.M. of Bone Creek', '61332' : 'R.M. of Grassy Creek', '61333' : 'R.M. of Wise Creek', '61341' : 'R.M. of Lone Tree', '61342' : 'R.M. of Val Marie', '61351' : 'R.M. of Auvergne', '61352' : 'R.M. of Pinto Creek', '61353' : 'R.M. of Glen McPherson', '61354' : 'R.M. of Mankota', '62100' : 'City of Regina', '62200' : 'Moose Jaw - Pense - Central Butte - Craik', #sub-regions '62211' : 'R.M. of Maple Bush', '62212' : 'R.M. of Huron', '62213' : 'R.M. of Enfield', '62214' : 'R.M. of Eyebrow', '62221' : 'R.M. of Craik', '62222' : 'R.M. of Sarnia', '62223' : 'R.M. of Marquis', '62224' : 'R.M. of Dufferin', '62231' : 'R.M. of Chaplin', '62232' : 'R.M. of Wheatlands', '62233' : 'R.M. of Shamrock', '62234' : 'R.M. of Rodgers', '62241' : 'R.M. of Caron', '62242' : 'R.M. of Moose Jaw', '62243' : 'R.M. of Pense', '62244' : 'City of Moose Jaw', '62245' : 'R.M. of Hillsborough', '62246' : 'R.M. of Redburn', '62247' : 'R.M. 
of Baildon', '62300' : 'Fort Qu\'Appelle - Indian Head - Lumsden - Pilot Butte', #sub-regions '62311' : 'R.M. of McKillop', '62312' : 'R.M. of Longlaketon', '62313' : 'R.M. of Lumsden', '62321' : 'R.M. of Cupar', '62322' : 'R.M. of Lipton', '62323' : 'R.M. of Tullymet', '62331' : 'R.M. of Sherwood', '62332' : 'R.M. of Bratt\'s Lake', '62341' : 'R.M. of Lajord', '62342' : 'R.M. of Francis', '62343' : 'R.M. of Montmartre and Assiniboine First Nation', '62351' : 'R.M. of Edenwold and Piapot First Nation', '62352' : 'R.M. of North Qu\'Appelle', '62353' : 'R.M. of South Qu\'Appelle', '62361' : 'R.M. of Abernethy', '62362' : 'R.M. of Indian Head', '62400' : 'Assiniboia - Gravelbourg - Coronach', #sub-regions '62411' : 'R.M. of Gravelbourg', '62412' : 'R.M. of Sutton', '62413' : 'R.M. of Wood River', '62414' : 'R.M. of Stonehenge', '62421' : 'R.M. of Waverley', '62422' : 'R.M. of Old Post', '62431' : 'R.M. of Lake Johnson', '62432' : 'R.M. of Terrell', '62433' : 'R.M. of Lake of the Rivers', '62434' : 'R.M. of Excel', '62441' : 'R.M. of Willow Bunch', '62442' : 'R.M. of Poplar Valley', '62443' : 'R.M. of Hart Butte', '62451' : 'R.M. of Elmsthorpe', '62452' : 'R.M. of Key West', '62461' : 'R.M. of Bengough', '62462' : 'R.M. of The Gap', '62463' : 'R.M. of Happy Valley', '62464' : 'R.M. of Surprise Valley', '62500' : 'Estevan - Weyburn - Radville - Milestone', #sub-regions '62511' : 'R.M. of Caledonia', '62512' : 'R.M. of Scott', '62513' : 'R.M. of Norton', '62514' : 'R.M. of Brokenshell', '62521' : 'R.M. of Wellington', '62522' : 'R.M. of Fillmore', '62523' : 'R.M. of Weyburn', '62524' : 'R.M. of Griffin', '62531' : 'R.M. of Laurier', '62532' : 'R.M. of Lomond', '62533' : 'R.M. of Lake Alma', '62534' : 'R.M. of Souris Valley', '62541' : 'R.M. of Cymri', '62542' : 'R.M. of Benson', '62543' : 'R.M. of Cambria', '62544' : 'R.M. of Estevan', '63100' : 'Yorkton - Melville - Esterhazy', #sub-regions '63111' : 'R.M. of Garry', '63112' : 'R.M. of Orkney', '63113' : 'R.M. of Stanley', '63114' : 'R.M. of Cana', '63121' : 'R.M. of McLeod', '63122' : 'R.M. of Grayson', '63131' : 'R.M. of Wallace', '63132' : 'R.M. of Calder', '63133' : 'R.M. of Saltcoats', '63134' : 'R.M. of Churchbridge', '63141' : 'R.M. of Fertile Belt', '63142' : 'R.M. of Langenburg', '63143' : 'R.M. of Spy Hill', '63200' : 'Moosomin - Grenfell - Kipling - Wawota', #sub-regions '63211' : 'R.M. of Wolseley', '63212' : 'R.M. of Elcapo and Cowessess First Nation', '63221' : 'R.M. of Chester', '63222' : 'R.M. of Hazelwood', '63223' : 'R.M. of Golden West and Ocean Man First Nation', '63224' : 'R.M. of Kingsley', '63231' : 'R.M. of Silverwood', '63232' : 'R.M. of Wawken', '63241' : 'R.M. of Martin', '63242' : 'R.M. of Moosomin', '63243' : 'R.M. of Walpole', '63244' : 'R.M. of Maryfield', '63251' : 'R.M. of Willowdale and Ochapowace First Nation', '63252' : 'R.M. of Rocanville', '63300' : 'Carlyle - Oxbow - Carnduff - Bienfait - Stoughton', #sub-regions '63311' : 'R.M. of Tecumseh', '63312' : 'R.M. of Brock', '63321' : 'R.M. of Browning', '63322' : 'R.M. of Coalfields', '63331' : 'R.M. of Moose Creek', '63332' : 'R.M. of Enniskillen', '63341' : 'R.M. of Moose Mountain', '63342' : 'R.M. of Antler', '63351' : 'R.M. of Storthoaks', '63352' : 'R.M. of Reciprocity', '63353' : 'R.M. of Mount Pleasant', '63354' : 'R.M. of Argyle', '64100' : 'Hudson Bay - Porcupine Plain', #sub-regions '64110' : 'R.M. of Hudson Bay including Shoal Lake and Red Earth First Nations', '64120' : 'R.M. of Hudson Bay including Hudson Bay proper and Reserve', '64130' : 'R.M. 
of Porcupine', '64200' : 'Kamsack - Canora - Preeceville', #sub-regions '64211' : 'R.M. of Hazel Dell', '64212' : 'R.M. of Preeceville', '64213' : 'R.M. of Invermay', '64214' : 'R.M. of Buchanan', '64221' : 'R.M. of Insinger', '64222' : 'R.M. of Good Lake', '64231' : 'R.M. of Keys and The Key First Nation', '64232' : 'R.M. of St. Philips', '64233' : 'R.M. of Sliding Hills', '64234' : 'R.M. of Cote', '64241' : 'R.M. of Clayton', '64242' : 'R.M. of Livingston', '65100' : 'City of Saskatoon', '65200' : 'Prince Albert - Shellbrook - Spiritwood - Duck Lake', #sub-regions '65211' : 'R.M. of Spiritwood', '65212' : 'R.M. of Canwood and Big River First Nation', '65221' : 'R.M. of Meeting Lake and Lucky Man First Nation', '65222' : 'R.M. of Leask and Mistawasis First Nation', '65230' : 'R.M. of Shellbrook and Sturgeon Lake First Nation', '65240' : 'R.M. of Duck Lake and Beardy\'s First Nation', '65251' : 'R.M. of Lakeland', '65252' : 'R.M. of Paddockwood', '65261' : 'R.M. of Buckland and Wahpeton First Nation', '65262' : 'R.M. of Garden River', '65263' : 'City of Prince Albert', '65271' : 'R.M. of Prince Albert', '65272' : 'R.M. of Birch Hills and Muskoday First Nation', '65273' : 'R.M. of St. Louis and One Arrow First Nation', '65300' : 'Melfort - Tisdale - Nipawin - Carrot River', #sub-regions '65310' : 'R.M. of Torch River', '65321' : 'R.M. of Nipawin', '65322' : 'R.M. of Moose Range', '65331' : 'R.M. of Kinistino and James Smith First Nation', '65332' : 'R.M. of Invergordon', '65333' : 'R.M. of Flett\'s Springs', '65334' : 'R.M. of Three Lakes', '65335' : 'R.M. of Lake Lenore', '65341' : 'R.M. of Willow Creek', '65342' : 'R.M. of Connaught', '65343' : 'R.M. of Star City', '65344' : 'R.M. of Tisdale', '65345' : 'R.M. of Pleasantdale', '65346' : 'R.M. of Barrier Valley', '65351' : 'R.M. of Arborfield', '65352' : 'R.M. of Bjorkdale', '65400' : 'Martensville - Warman - Rosthern - Delisle - Wakaw', #sub-regions '65411' : 'R.M. of Redberry', '65412' : 'R.M. of Blaine Lake', '65413' : 'R.M. of Great Bend', '65421' : 'R.M. of Laird', '65422' : 'R.M. of Rosthern', '65431' : 'R.M. of Eagle Creek', '65432' : 'R.M. of Corman Park', '65433' : 'R.M. of Perdue', '65434' : 'R.M. of Vanscoy', '65441' : 'R.M. of Aberdeen', '65442' : 'R.M. of Fish Creek', '65443' : 'R.M. of Hoodoo', '65444' : 'R.M. of Grant', '65445' : 'R.M. of Bayne', '65451' : 'R.M. of Colonsay', '65452' : 'R.M. of Viscount', '65453' : 'R.M. of Blucher', '65500' : 'Outlook - Watrous - Hanley - Imperial - Dinsmore', #sub-regions '65511' : 'R.M. of Harris', '65512' : 'R.M. of Montrose', '65513' : 'R.M. of Milden', '65514' : 'R.M. of Fertile Valley', '65515' : 'R.M. of King George', '65516' : 'R.M. of Coteau', '65521' : 'R.M. of Dundurn', '65522' : 'R.M. of Rudy', '65523' : 'R.M. of Rosedale', '65524' : 'R.M. of Loreburn', '65531' : 'R.M. of Lost River', '65532' : 'R.M. of Morris', '65533' : 'R.M. of McCraney', '65534' : 'R.M. of Wood Creek', '65541' : 'R.M. of Arm River', '65542' : 'R.M. of Willner', '65543' : 'R.M. of Big Arm', '65600' : 'Humboldt - Wynyard - Wadena - Lanigan - Foam Lake', #sub-regions '65611' : 'R.M. of Humboldt', '65612' : 'R.M. of St. Peter', '65613' : 'R.M. of Wolverine', '65614' : 'R.M. of Leroy', '65621' : 'R.M. of Spalding', '65622' : 'R.M. of Ponass Lake', '65623' : 'R.M. of Lakeside', '65624' : 'R.M. of Lakeview', '65631' : 'R.M. of Usborne', '65632' : 'R.M. of Prairie Rose', '65633' : 'R.M. of Wreford', '65634' : 'R.M. of Mount Hope', '65635' : 'R.M. of Last Mountain Valley', '65641' : 'R.M. 
of Big Quill', '65642' : 'R.M. of Elfros', '65643' : 'R.M. of Kutawa and Poor Man First Nation', '65644' : 'R.M. of Emerald', '65645' : 'R.M. of Touchwood', '65646' : 'R.M. of Kellross', '65651' : 'R.M. of Kelvington and Yellowquill First Nation', '65652' : 'R.M. of Sasman', '65653' : 'R.M. of Foam Lake and Fishing Lake First Nation', '65654' : 'R.M. of Ituna Bon Accord', '66100' : 'Meadow Lake - Big River - Green Lake - Pierceland', #sub-regions '66110' : 'R.M. of Meadow Lake and Waterhen First Nation', '66120' : 'R.M. of Beaver River', '66130' : 'Green Lake', '66140' : 'R.M. of Loon Lake', '66150' : 'R.M. of Big River', '66200' : 'The Battlefords - Unity - Maidstone - St. Walburg', #sub-regions '66211' : 'R.M. of Frenchman Butte', '66212' : 'R.M. of Mervin', '66213' : 'R.M. of Turtle River', '66221' : 'R.M. of Britannia', '66222' : 'R.M. of Wilton', '66223' : 'R.M. of Eldon', '66224' : 'R.M. of Paynton', '66231' : 'R.M. of Manitou Lake', '66232' : 'R.M. of Hillsdale', '66233' : 'R.M. of Senlac', '66234' : 'R.M. of Round Valley', '66241' : 'R.M. of Cut Knife', '66242' : 'R.M. of Battle River and Sweet Grass First Nation', '66243' : 'R.M. of Buffalo', '66244' : 'R.M. of Prairie, Red Pheasant and Mosquito First Nations', '66251' : 'R.M. of Parkdale', '66252' : 'R.M. of Medstead', '66253' : 'R.M. of Meota', '66254' : 'R.M. of Round Hill', '66260' : 'The Battlefords', '66271' : 'R.M. of North Battleford', '66272' : 'R.M. of Douglas', '66273' : 'R.M. of Mayfield', '66280' : 'R.M. of Glenside', '66300' : 'Kindersley - Rosetown - Biggar - Wilkie - Macklin', #sub-regions '66311' : 'R.M. of Eye Hill', '66312' : 'R.M. of Grass Lake', '66313' : 'R.M. of Heart\'s Hill', '66314' : 'R.M. of Progress', '66321' : 'R.M. of Tramping Lake', '66322' : 'R.M. of Reform', '66323' : 'R.M. of Mariposa', '66324' : 'R.M. of Grandview', '66331' : 'R.M. of Rosemount', '66332' : 'R.M. of Biggar', '66341' : 'R.M. of Antelope Park', '66342' : 'R.M. of Prairiedale', '66343' : 'R.M. of Milton', '66351' : 'R.M. of Oakdale', '66352' : 'R.M. of Winslow', '66353' : 'R.M. of Kindersley', '66361' : 'R.M. of Mountain View', '66362' : 'R.M. of Marriott', '66363' : 'R.M. of Pleasant Valley', '66364' : 'R.M. of St. Andrews', '66371' : 'R.M. of Chesterfield', '66372' : 'R.M. of Newcombe', '66381' : 'R.M. of Monet', '66382' : 'R.M. 
of Snipe Lake', '66400' : 'City of Lloydminster - SK', '67100' : 'Ile a la Crosse - Buffalo Narrows - Beauval', #sub-regions '67110' : 'Buffalo Narrows and Peter Pond Lake', '67120' : 'Ile a la Crosse and Beauval', '67200' : 'La Ronge - Prince Albert National Park - Narrow Hills Provincial Park', #sub-regions '67210' : 'Highway 165 between Highway 2 and Route 914 including Pinehouse Lake', '67220' : 'Lac La Ronge Provincial Park including La Ronge', '67230' : 'Candle Lake and Narrow Hills Provincial Parks', '67240' : 'Montreal Lake and Molanosa', '67250' : 'Prince Albert National Park', '67300' : 'Pelican Narrows - Cumberland House - Creighton', #sub-regions '67310' : 'Highway 135 including Pelican Narrows and Sandy Bay', '67320' : 'Seabee Mine', '67330' : 'Hanson Lake Road east of Hwy 135 including Creighton', '67340' : 'Hanson Lake Road between Highways 165 and 135', '67350' : 'Cumberland House', '68100' : 'Uranium City - Camsell Portage', '68200' : 'Fond du Lac - Stony Rapids', #sub-regions '68210' : 'Fond du Lac', '68220' : 'Stony Rapids and Black Lake', '68300' : 'La Loche - Clearwater River Provincial Park - Cluff Lake', #sub-regions '68310' : 'Cluff Lake Mine', '68320' : 'La Loche and Clearwater River Provincial Park', '68400' : 'Cree Lake - Key Lake', '68500' : 'Wollaston Lake - Collins Bay', '68600' : 'Southend - Brabant Lake - Kinoosao', #Alberta '71100' : 'Jasper National Park', #sub-regions '71110' : 'Jasper National Park near Pocahontas', '71120' : 'Jasper National Park near Jasper', '71130' : 'Jasper National Park near Columbia Icefield and Sunwapta Falls', '71200' : 'Nordegg - Forestry Trunk Road Highway 734', #sub-regions '71210' : 'Yellowhead County near Cadomin and Robb', '71220' : 'Clearwater County near Chungo Creek', '71230' : 'Clearwater County near Nordegg and Big Horn First Nation', '71240' : 'Clearwater County near Ya-Ha-Tinda Ranch', '71300' : 'Rocky Mountain House - Caroline', #sub-regions '71310' : 'M.D. of Brazeau near Cynthia and Lodgepole', '71320' : 'Yellowhead County near Wolf Lake and Dismal Creek', '71330' : 'Clearwater County near Sunchild First Nation', '71340' : 'M.D. of Brazeau near the Brazeau Dam', '71350' : 'Clearwater County near Rocky Mountain House and Crimson Lake', '71360' : 'Clearwater County near Caroline and James River Bridge', '71400' : 'Banff National Park', #sub-regions '71410' : 'Clearwater County near Siffleur Wilderness area', '71420' : 'Banff National Park near Saskatchewan River Crossing', '71430' : 'Banff National Park near Lake Louise', '71440' : 'Banff National Park near Banff', '71500' : 'Kananaskis - Canmore', #sub-regions '71510' : 'M.D. of Bighorn near Ghost River Wilderness', '71520' : 'M.D. 
of Bighorn near Canmore, Bow Valley Park and Ghost Lake', '71530' : 'Northern Kananaskis Country near Peter Lougheed Provincial Park', '71540' : 'Southern Kananaskis Country near Highwood and Cataract Creek', '72100' : 'Red Deer - Ponoka - Innisfail - Stettler', #sub-regions '72111' : 'Lacombe County near Eckville', '72112' : 'Lacombe County near Lacombe, Blackfalds and Gull Lake', '72113' : 'Lacombe County near Clive and Alix', '72121' : 'Red Deer County near Sylvan Lake', '72122' : 'Red Deer County near Spruce View and Red Lodge Provincial Park', '72123' : 'Red Deer County near Penhold, Innisfail and Bowden', '72124' : 'Red Deer County near Pine Lake', '72125' : 'Red Deer County near Elnora, Lousana and Delburne', '72131' : 'County of Stettler near Stettler, Erskine and Rochon Sands', '72132' : 'County of Stettler near Big Valley', '72133' : 'County of Stettler near Donalda', '72134' : 'County of Stettler near Gadsby', '72135' : 'County of Stettler near Byemoor', '72140' : 'City of Red Deer', '72150' : 'County of Paintearth near Halkirk', '72160' : 'Flagstaff County near Forestburg', '72170' : 'County of Camrose near Bashaw', '72180' : 'Ponoka County near Ponoka, Hobbema and the Samson First Nation', '72200' : 'Airdrie - Cochrane - Olds - Sundre', #sub-regions '72210' : 'Mountain View County near Sundre', '72220' : 'Mountain View County near Olds and Didsbury', '72230' : 'Mountain View County near Cremona', '72240' : 'Mountain View County near Carstairs', '72250' : 'M.D. of Rocky View near Airdrie and Crossfield', '72260' : 'M.D. of Rocky View near Bottrell', '72270' : 'M.D. of Rocky View near Cochrane', '72300' : 'Drumheller - Three Hills', #sub-regions '72311' : 'Kneehill County near Torrington', '72312' : 'Kneehill County near Trochu and Dry Island Buffalo Jump Park', '72313' : 'Kneehill County near Three Hills', '72321' : 'Kneehill County near Linden and Acme', '72322' : 'Kneehill County near Carbon', '72331' : 'Starland County near Rumsey', '72332' : 'Starland County near Morrin', '72333' : 'Starland County near Delia', '72341' : 'Wheatland County near Rockyford and Rosebud', '72342' : 'Wheatland County near Highways 569 and 848', '72350' : 'Town of Drumheller', '72360' : 'M.D. of Rocky View near Irricana and Kathryn', '72370' : 'Special Area 2 near Finnegan and Little Fish Lake Provincial Park', '72400' : 'City of Calgary', '72500' : 'Okotoks - High River - Claresholm', #sub-regions '72510' : 'M.D. of Rocky View near Sarcee First Nation', '72521' : 'M.D. of Foothills near Priddis', '72522' : 'M.D. of Foothills near Turner Valley', '72523' : 'M.D. of Foothills near Longview', '72531' : 'M.D. of Foothills near Okotoks', '72532' : 'M.D. of Foothills near High River', '72533' : 'M.D. of Foothills near Cayley', '72541' : 'M.D. of Willow Creek near Nanton', '72542' : 'M.D. of Willow Creek near Claresholm and Stavely', '72600' : 'Brooks - Strathmore - Vulcan', #sub-regions '72610' : 'M.D. of Rocky View near Langdon and Dalemead', '72620' : 'M.D. 
of Foothills near Blackie', '72631' : 'Wheatland County near Strathmore, Carseland and Lyalta', '72632' : 'Wheatland County near Standard', '72633' : 'Wheatland County near Siksika First Nation and Gleichen', '72634' : 'Wheatland County near Hussar', '72641' : 'Vulcan County near Mossleigh and Arrowwood', '72642' : 'Vulcan County near Vulcan and Ensign', '72643' : 'Vulcan County near Champion', '72644' : 'Vulcan County near Lomond', '72645' : 'Vulcan County near Milo', '72651' : 'Newell County near Bassano', '72652' : 'Newell County near Gem', '72653' : 'Newell County near Brooks and Rosemary', '72654' : 'Newell County near Scandia and Bow City', '72660' : 'M.D. of Taber near Enchant', '73100' : 'Crowsnest Pass - Pincher Creek - Waterton Park', #sub-regions '73110' : 'M.D. of Ranchland including Chain Lakes Provincial Park', '73120' : 'M.D. of Pincher Creek near Cowley', '73130' : 'Peigan First Nation', '73140' : 'Municipality of Crowsnest Pass', '73150' : 'M.D. of Pincher Creek near Beauvais Lake Provincial Park', '73160' : 'M.D. of Pincher Creek near Pincher Creek and Twin Butte', '73170' : 'Waterton Lakes National Park', '73200' : 'Cardston - Fort MacLeod - Magrath', #sub-regions '73211' : 'M.D. of Willow Creek near Granum', '73212' : 'M.D. of Willow Creek near Fort MacLeod', '73221' : 'Blood First Nation including Stand Off', '73222' : 'Cardston County near Glenwood', '73231' : 'Cardston County near Magrath and Spring Coulee', '73232' : 'Cardston County near Del Bonita and Whiskey Gap', '73241' : 'Cardston County near Mountain View and Police Outpost Provincial Park', '73242' : 'Cardston County near Cardston and Carway', '73300' : 'Lethbridge - Taber - Milk River', #sub-regions '73311' : 'Lethbridge County near Barons and Nobleford', '73312' : 'Lethbridge County near Picture Butte and Turin', '73313' : 'Lethbridge County near Coaldale', '73314' : 'City of Lethbridge', '73321' : 'M.D. of Taber near Vauxhall and Hayes', '73322' : 'M.D. of Taber near Taber and Cranford', '73323' : 'M.D. of Taber near Grassy Lake', '73331' : 'Warner County near Raymond and New Dayton', '73332' : 'Warner County near Wrentham', '73341' : 'Warner County near Warner', '73342' : 'Warner County near Milk River and Coutts', '73343' : 'Warner County near Writing-On-Stone Provincial Park', '73350' : 'County of Forty Mile near Skiff', '74100' : 'Hanna - Coronation - Oyen', #sub-regions '74111' : 'County of Paintearth near Castor', '74112' : 'County of Paintearth near Brownfield', '74113' : 'County of Paintearth near Coronation', '74121' : 'Special Area 2 near Scapa', '74122' : 'Special Area 2 near Hanna and Richdale', '74123' : 'Special Area 2 near Sunnynook', '74124' : 'Special Area 2 near Cessford', '74131' : 'Special Area 4 near Veteran and Consort', '74132' : 'Special Area 4 near Hemaruka', '74133' : 'Special Area 4 near Kirriemuir and Compeer', '74141' : 'Special Area 3 near Youngstown', '74142' : 'Special Area 3 near Big Stone', '74143' : 'Special Area 3 near New Brigden', '74144' : 'Special Area 3 near Cereal and Oyen', '74145' : 'Special Area 3 near Sibbald', '74150' : 'M.D. of Provost near Bodo', '74160' : 'M.D. 
of Acadia including Acadia Valley', '74200' : 'Medicine Hat - Bow Island - Suffield', #sub-regions '74211' : 'Cypress County near Tide Lake', '74212' : 'Cypress County near Suffield', '74221' : 'Special Area 2 near Dinosaur Provincial Park', '74222' : 'Special Area 2 near Jenner', '74223' : 'Special Area 2 near Buffalo', '74224' : 'Special Area 2 near Bindloss and Empress', '74231' : 'Newell County near Patricia', '74232' : 'Newell County near Tilley and Rolling Hills', '74241' : 'Cypress County near CFB Suffield', '74242' : 'Cypress County near Redcliff', '74251' : 'Cypress County near Seven Persons', '74252' : 'Cypress County near Dunmore', '74253' : 'Cypress County near Irvine and Walsh', '74261' : 'Cypress County near Schuler', '74262' : 'Cypress County near McNeill', '74270' : 'City of Medicine Hat', '74280' : 'County of Forty Mile near Bow Island and Whitla', '74300' : 'Cypress Hills Provincial Park - Foremost', #sub-regions '74310' : 'County of Forty Mile near Foremost', '74320' : 'County of Forty Mile near Etzikom', '74330' : 'County of Forty Mile near Manyberries', '74340' : 'Cypress County near Cypress Hills Provincial Park', '74350' : 'County of Forty Mile near Aden', '74360' : 'Cypress County near Onefour', '75100' : 'Bonnyville - St. Paul - Cold Lake - Lac La Biche', #sub-regions '75111' : 'Lakeland County near Plamondon', '75112' : 'Lakeland County near Imperial Mills, Heart Lake and Philomena', '75113' : 'Lakeland County near Lac La Biche and Sir Winston Churchill Park', '75114' : 'Lakeland County near Lakeland Provincial Park', '75115' : 'Lakeland County near Rich Lake', '75116' : 'Lakeland County near Cold Lake air weapons range', '75121' : 'Smoky Lake County near Kikino', '75122' : 'Smoky Lake County near Vilna and Whitefish Lake First Nation', '75131' : 'County of St. Paul near Ashmont', '75132' : 'County of St. Paul near St. Paul', '75133' : 'County of St. Paul near Elk Point', '75134' : 'County of St. Paul near Riverview and Unipouheos First Nation', '75141' : 'M.D. of Bonnyville near La Corey and Wolf Lake', '75142' : 'M.D. of Bonnyville near Glendon and Moose Lake Provincial Park', '75143' : 'M.D. of Bonnyville near Bonnyville and Ardmore', '75144' : 'M.D. of Bonnyville near Cold Lake and Grand Centre', '75145' : 'M.D. of Bonnyville near Beaverdam, Elizabeth and Cold Lake First Nation', '75146' : 'M.D. of Bonnyville near Sputinow', '75200' : 'Lloydminster - Wainwright - Vermilion - Provost', #sub-regions '75211' : 'County of Two Hills near Two Hills', '75212' : 'County of Two Hills near Myrnam and Derwent', '75221' : 'County of Minburn near Ranfurly and Innisfree', '75222' : 'County of Minburn near Mannville and Minburn', '75230' : 'Beaver County near Viking', '75241' : 'Flagstaff County near Killam and Sedgewick', '75242' : 'Flagstaff County near Lougheed and Hardisty', '75243' : 'Flagstaff County near Alliance', '75251' : 'County of Vermilion River near Vermilion', '75252' : 'County of Vermilion River near Islay', '75253' : 'County of Vermilion River near Clandonald and Dewberry', '75254' : 'County of Vermilion River near Tulliby Lake', '75255' : 'County of Vermilion River near Kitscoty and Marwayne', '75256' : 'County of Vermilion River near Paradise Valley', '75260' : 'City of Lloydminster - AB', '75271' : 'M.D. of Wainwright near Irma', '75272' : 'M.D. of Wainwright near Wainwright', '75273' : 'M.D. of Wainwright near Edgerton', '75274' : 'M.D. of Wainwright near Chauvin', '75281' : 'M.D. of Provost near Hughenden', '75282' : 'M.D. 
of Provost near Czar and Metiskow', '75283' : 'M.D. of Provost near Provost', '76100' : 'Westlock - Barrhead - Athabasca', #sub-regions '76110' : 'Woodlands County near Fort Assiniboine', '76121' : 'County of Barrhead near Thunder Lake Provincial Park', '76122' : 'County of Barrhead near Bloomsbury and Neerlandia', '76123' : 'County of Barrhead near Barrhead and Lac La Nonne', '76130' : 'M.D. of Lesser Slave River near Chisholm and Cross Lake Park', '76141' : 'Westlock County near Jarvie', '76142' : 'Westlock County near Westlock and Clyde', '76151' : 'County of Athabasca near Athabasca and Island Lake', '76152' : 'County of Athabasca near Rochester and Meanook', '76153' : 'County of Athabasca near Grassland', '76154' : 'County of Athabasca near Boyle and Caslan', '76160' : 'County of Thorhild near Newbrook and Long Lake Provincial Park', '76200' : 'Spruce Grove - Morinville - Mayerthorpe - Evansburg', #sub-regions '76210' : 'Yellowhead County near Evansburg, Wildwood and MacKay', '76221' : 'Lac Ste. Anne County near Mayerthorpe and Sangudo', '76222' : 'Lac Ste. Anne County near Cherhill and Glenevis', '76223' : 'Lac Ste. Anne County near Onoway, Rich Valley and Lac Ste. Anne', '76231' : 'Parkland County near Tomahawk and Entwistle', '76232' : 'Parkland County near Wabamun Lake Provincial Park', '76233' : 'Parkland County near Stony Plain and Spruce Grove', '76241' : 'Sturgeon County near Calahoo, Villeneuve and Riviere Qui Barre', '76242' : 'Sturgeon County near Legal and Mearns', '76243' : 'Sturgeon County near Morinville', '76300' : 'Fort Saskatchewan - Vegreville - Redwater - Smoky Lake', #sub-regions '76310' : 'County of Thorhild near Thorhild', '76321' : 'Sturgeon County near Bon Accord and Gibbons', '76322' : 'Sturgeon County near Redwater', '76331' : 'Lamont County near Bruderheim and Lamont', '76332' : 'Lamont County near Andrew', '76333' : 'Lamont County near Mundare and Chipman', '76340' : 'Smoky Lake County near Smoky Lake and Waskatenau', '76350' : 'County of Two Hills near Willingdon', '76360' : 'City of Fort Saskatchewan and Northern Strathcona County', '76370' : 'Elk Island National Park', '76380' : 'County of Minburn near Vegreville', '76400' : 'City of Edmonton - St. Albert - Sherwood Park', '76500' : 'Drayton Valley - Devon - Rimbey - Pigeon Lake', #sub-regions '76510' : 'Leduc County near Warburg and Thorsby', '76520' : 'Leduc County near Devon and Calmar', '76530' : 'M.D. 
of Brazeau near Drayton Valley and Breton', '76540' : 'County of Wetaskiwin near Pigeon Lake', '76550' : 'County of Wetaskiwin near Alder Flats and Winfield', '76560' : 'Ponoka County near Rimbey, Bluffton and Hoadley', '76570' : 'Ponoka County near Crestomere', '76600' : 'Leduc - Camrose - Wetaskiwin - Tofield', #sub-regions '76611' : 'Leduc County near Leduc and Beaumont', '76612' : 'Leduc County near New Sarepta', '76620' : 'County of Wetaskiwin near Wetaskiwin, Millet and Gwynne', '76630' : 'Strathcona County near Cooking Lake', '76641' : 'Beaver County near Tofield', '76642' : 'Beaver County near Ryley and Holden', '76651' : 'County of Camrose near Hay Lakes and Miquelon Lake Provincial Park', '76652' : 'County of Camrose near Camrose', '76653' : 'County of Camrose near New Norway', '76654' : 'County of Camrose near Bawlf', '76660' : 'Flagstaff County near Daysland', '77100' : 'Grande Prairie - Beaverlodge - Valleyview', #sub-regions '77111' : 'County of Grande Prairie near Beaverlodge, Hythe and Demmitt', '77112' : 'County of Grande Prairie near Sexsmith and La Glace', '77113' : 'County of Grande Prairie near Grande Prairie and Wembley', '77121' : 'M.D. of Greenview near Amundson', '77122' : 'M.D. of Greenview near DeBolt', '77123' : 'M.D. of Greenview near Little Smoky', '77124' : 'M.D. of Greenview near Young\'s Point and Sturgeon Lake First Nation', '77125' : 'M.D. of Greenview near Valleyview', '77200' : 'Hinton - Grande Cache', #sub-regions '77210' : 'M.D. of Greenview near Grande Cache', '77220' : 'M.D. of Greenview near Kakwa Wildland Provincial Park', '77230' : 'Yellowhead County near William A. Switzer Provincial Park', '77240' : 'Willmore Wilderness Park', '77250' : 'Yellowhead County near Hinton and Obed Lake Provincial Park', '77300' : 'Slave Lake', #sub-regions '77311' : 'M.D. of Big Lakes near Joussard and East Prairie', '77312' : 'M.D. of Big Lakes near Grouard Mission and Hilliards Bay Park', '77313' : 'M.D. of Big Lakes near Faust and Kinuso', '77321' : 'M.D. of Lesser Slave River near Slave Lake', '77322' : 'M.D. of Lesser Slave River near Slave Lake Provincial Park', '77323' : 'M.D. of Lesser Slave River near Smith', '77330' : 'M.D. of Opportunity near Calling Lake', '77340' : 'County of Athabasca near Wandering River', '77400' : 'Whitecourt - Edson - Fox Creek - Swan Hills', #sub-regions '77410' : 'M.D. of Greenview near Fox Creek', '77421' : 'Yellowhead County near Edson and Marlboro', '77422' : 'Yellowhead County near Peers and Niton Junction', '77431' : 'Woodlands County near Windfall Creek', '77432' : 'Woodlands County near Carson-Pegasus Provincial Park', '77433' : 'Woodlands County near Lone Pine', '77434' : 'Woodlands County near Whitecourt and Blue Ridge', '77440' : 'M.D. of Big Lakes near Swan Hills', '78100' : 'High Level - Rainbow Lake - Fort Vermilion - Mackenzie Highway', #sub-regions '78111' : 'M.D. of Mackenzie near Bistcho Lake', '78112' : 'M.D. of Mackenzie near Zama Lake, Chateh and Rainbow Lake', '78113' : 'M.D. of Mackenzie near Indian Cabins and Steen River', '78114' : 'M.D. of Mackenzie near Meander River', '78115' : 'M.D. of Mackenzie near High Level', '78120' : 'M.D. of Northern Lights near Paddle Prairie and Carcajou', '78131' : 'M.D. of MacKenzie near the Caribou Mountains', '78132' : 'M.D. of Mackenzie near Fort Vermilion and Child Lake First Nation', '78133' : 'M.D. of MacKenzie near John D\'or Prairie and Fox Creek', '78134' : 'M.D. of Mackenzie near Buffalo Head Prairie and La Crete', '78135' : 'M.D. 
of Mackenzie near Tall Cree First Nation and Wadlin Lake', '78200' : 'Peace River - Fairview - High Prairie - Manning', #sub-regions '78211' : 'M.D. of Clear Hills near Notikewin River', '78212' : 'M.D. of Clear Hills near Cleardale and Worsley', '78213' : 'M.D. of Clear Hills near Eureka River and Hines Creek', '78221' : 'Saddle Hills County near Silver Valley and Bay Tree', '78222' : 'Saddle Hills County near Moonshine Lake Provincial Park', '78223' : 'Saddle Hills County near Woking', '78231' : 'M.D. of Spirit River including Spirit River and Rycroft', '78232' : 'M.D. of Fairview including Fairview, Whitelaw and Dunvegan Provincial Park', '78233' : 'M.D. of Peace including Peace River and Grimshaw', '78241' : 'M.D. of Northern Lights near Manning and Notikewin Provincial Park', '78242' : 'M.D. of Northern Lights near Dixonville and Chinook Valley', '78251' : 'Birch Hills County near Wanham and Peoria', '78252' : 'Birch Hills County near Eaglesham', '78261' : 'M.D. of East Peace near Keppler Creek', '78262' : 'M.D. of East Peace near Nampa and Three Creeks', '78270' : 'M.D. of Smoky River including McLennan, Falher and Girouxville', '78280' : 'M.D. of Big Lakes near High Prairie and Winagami Lake Provincial Park', '78300' : 'Wabasca - Peerless Lake - Gift Lake - Cadotte Lake', #sub-regions '78311' : 'M.D. of East Peace near Bison Lake', '78312' : 'M.D. of East Peace near Woodland Cree First Nation and Little Buffalo', '78313' : 'M.D. of East Peace near Utikoomak Lake First Nation', '78320' : 'M.D. of Big Lakes near Peavine and Gift Lake settlements', '78331' : 'M.D. of Opportunity near Peerless Lake and Trout Lake', '78332' : 'M.D. of Opportunity near Red Earth Creek', '78333' : 'M.D. of Opportunity near Wabasca-Desmarais and Sandy Lake', '78334' : 'M.D. of Opportunity near Chipewyan Lake', '79100' : 'Fort Chipewyan - Wood Buffalo National Park', #sub-regions '79111' : 'Wood Buffalo National Park near Buffalo river', '79112' : 'Wood Buffalo National Park near Hay Camp', '79113' : 'Wood Buffalo National Park near Garden Creek', '79114' : 'Wood Buffalo National Park near Peace Point and Lake Claire', '79121' : 'R.M. of Wood Buffalo near Namur River', '79122' : 'R.M. of Wood Buffalo near Fort Hills', '79123' : 'R.M. of Wood Buffalo near Old Fort and Chipewyan First Nation', '79131' : 'R.M. of Wood Buffalo near Fitzgerald', '79132' : 'R.M. of Wood Buffalo near Colin-Cornwall Lakes Wildland Park', '79133' : 'R.M. of Wood Buffalo near Fort Chipewyan', '79200' : 'Fort McMurray - Fort MacKay', #sub-regions '79210' : 'R.M. of Wood Buffalo near Fort Mackay', '79220' : 'R.M. of Wood Buffalo near Fort McMurray', '79230' : 'R.M. of Wood Buffalo near Anzac and Gregoire Lake Provincial Park', '79240' : 'Lakeland County near Highway 63 and Crow Lake Provincial Park', '79250' : 'R.M. 
of Wood Buffalo near Conklin and Chard', #British Columbia '81100' : 'Haida Gwaii', '81200' : 'North Vancouver Island', #sub-regions '81210' : 'North- and west-facing coasts', '81220' : 'East-facing coasts', '81300' : 'East Vancouver Island', '81400' : 'West Vancouver Island', #sub-regions '81410' : 'Sombrio Point and north', '81420' : 'South of Sombrio Point', '81500' : 'Inland Vancouver Island', '81600' : 'Greater Victoria', '82100' : 'Central Coast - Coastal sections', '82200' : 'Central Coast - Inland sections', '82300' : 'Sunshine Coast', '82400' : 'Whistler', '82500' : 'Howe Sound', '82600' : 'Metro Vancouver', '82700' : 'Fraser Valley', #sub-regions '82710' : 'West including Abbotsford', '82720' : 'East including Chilliwack', '82800' : 'Southern Gulf Islands', '83100' : 'Fraser Canyon', '83200' : 'South Thompson', '83300' : 'Nicola', '83400' : 'Similkameen', '83500' : 'Okanagan Valley', #sub-regions '83510' : 'North including Vernon', '83520' : 'Central including Kelowna', '83530' : 'South including Penticton', '83600' : 'Shuswap', '84100' : 'Arrow and Slocan Lakes', '84200' : 'Boundary', '84300' : 'West Kootenay', '84400' : 'Kootenay Lake', '84500' : 'East Kootenay', #sub-regions '84510' : 'South including Cranbrook', '84520' : 'North including Invermere', '84600' : 'Elk Valley', '85100' : 'North Thompson', '85200' : 'North Columbia', '85300' : 'Kinbasket', '85400' : 'West Columbia', '85500' : 'East Columbia', '85600' : 'Yoho and Kootenay Parks', '86100' : 'Chilcotin', '86200' : 'Cariboo', #sub-regions '86210' : 'North including Quesnel', '86220' : 'South including Williams Lake', '86300' : '100 Mile', '86400' : 'Yellowhead', '87100' : 'Bulkley Valley - The Lakes', #sub-regions '87110' : 'Northwest including Smithers', '87120' : 'Southeast including Burns Lake', '87200' : 'Williston', '87300' : 'BC Peace River', #sub-regions '87310' : 'South', '87320' : 'North', '87400' : 'Prince George', '87500' : 'McGregor', '88100' : 'Fort Nelson', '88200' : 'Muncho Lake and Stone Mountain Provincial Parks', '88300' : 'Watson Lake - BC', '89100' : 'North Coast - Coastal sections', #sub-regions '89110' : 'Banks Island and north', '89120' : 'South of Banks Island', '89200' : 'North Coast - Inland sections', #sub-regions '89210' : 'North of Kitimat', '89220' : 'Kitimat and south', '89300' : 'Dease Lake', '89400' : 'Cassiar Mountains - BC', '89500' : 'Teslin - BC', '89600' : 'Atlin', '89700' : 'South Klondike Highway - Carcross to White Pass', '89800' : 'Haines Road - Haines Junction to Pleasant Camp', #Yukon '91100' : 'Dawson', '91200' : 'Mayo', '91300' : 'Beaver Creek', '91400' : 'Pelly - Carmacks', '91500' : 'Kluane Lake', '91600' : 'Haines Junction', '91700' : 'Whitehorse', '92100' : 'Teslin - YT', '92200' : 'Cassiar Mountains - YT', '92300' : 'Watson Lake - YT', '92400' : 'Faro - Ross River', '93100' : 'Dempster', '93200' : 'Old Crow', #Northwest Territories '94210' : 'Wrigley Region', '94220' : 'Fort Simpson Region including Jean Marie River', '94230' : 'Fort Liard Region including Nahanni Butte - Trout Lake', '94310' : 'North Slave Region including Wekweti - Wha Ti - Behchoko', '94320' : 'Fort Providence Region including Kakisa - Chan Lake', '94330' : 'Yellowknife Region', '94510' : 'Hay River Region including Enterprise', '94520' : 'Fort Resolution Region including Highway 6', '94530' : 'Lutsel K\'e Region', '94540' : 'Thebacha Region including Fort Smith - Salt River Reserve', '95100' : 'Tuktoyaktuk - East Channel Region', '95200' : 'Aklavik Region', '95300' : 'Inuvik Region', '95400' : 
'South Delta Region including Fort McPherson - Tsiigehtchic', '95610' : 'Fort Good Hope Region', '95620' : 'Norman Wells - Tulita Region', '95630' : 'Colville Lake', '95640' : 'Deline', '95800' : 'Paulatuk', '96210' : 'Sachs Harbour', '96310' : 'Ulukhaktok', #Nunavut '97110' : 'Cambridge Bay', '97210' : 'Kugluktuk', '97310' : 'Taloyoak', '97410' : 'Gjoa Haven', '97420' : 'Kugaaruk', '97510' : 'Baker Lake', '97610' : 'Arviat', '97620' : 'Rankin Region including Whale Cove', '97630' : 'Chesterfield Inlet', '97710' : 'Hall Beach', '97720' : 'Igloolik', '97740' : 'Repulse Bay', '97810' : 'Coral Harbour', '97820' : 'Sanikiluaq', '98110' : 'Arctic Bay', '98120' : 'Pond Inlet', '98130' : 'Clyde River', '98210' : 'Qikiqtarjuaq', '98220' : 'Pangnirtung', '98230' : 'Iqaluit', '98240' : 'Kimmirut', '98250' : 'Cape Dorset', '99110' : 'Resolute', '99210' : 'Grise Fjord', 'XXXXX' : 'TEST', } SAME_LOCA={ '0' : None, '1' : 'Northwest', '2' : 'North Central', '3' : 'Northeast', '4' : 'West Central', '5' : 'Central', '6' : 'East Central', '7' : 'Southwest', '8' : 'South Central', '9' : 'Southeast', } SAME_LOCB={ '0' : None, '1' : 'MOUNTAIN/HIGH', '2' : 'MOUNTAIN', '3' : 'VALLEY', } # SAME_CTYB is a list of "B" class counties for geographic subdivisions. If this applies to your area, add your county code to this list, and modify SAME_LOCB as needed. # A, C and D counties all use SAME_LOCA SAME_CTYB=['SAME1', 'SAME2'] SAME__ORG={ '' : None, 'EAS' : {'NAME' : {'US' : 'Broadcast station or cable system', 'CA' : 'Broadcast station or cable system'}, 'PLURAL' : False, 'ARTICLE' : {'US' : 'A', 'CA' : 'A'}}, 'CIV' : {'NAME' : {'US' : 'Civil authorities', 'CA' : 'Civil authorities'}, 'PLURAL' : True, 'ARTICLE' : {'US' : 'THE', 'CA' : 'THE'}}, 'WXR' : {'NAME' : {'US' : 'National Weather Service', 'CA' : 'Environment Canada'}, 'PLURAL' : False, 'ARTICLE' : {'US' : 'THE', 'CA' : ''}}, 'PEP' : {'NAME' : {'US' : 'Primary Entry Point System', 'CA' : 'Primary Entry Point System'}, 'PLURAL' : False, 'ARTICLE' : {'US' : 'THE', 'CA' : 'THE'}}, 'EAN' : {'NAME' : {'US' : 'Emergency Action Notification Network', 'CA' : 'Emergency Action Notification Network'}, 'PLURAL' : False, 'ARTICLE' : {'US' : 'THE', 'CA' : 'THE'}}, } SAME_UEEE={ 'W' : 'Warning', 'A' : 'Watch', 'E' : 'Emergency', 'S' : 'Statement', 'T' : 'Test', 'M' : 'Message', 'R' : 'Warning', 'N' : 'Notification', }
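# The SAME_LOCA, SAME_LOCB and SAME_CTYB tables above combine for U.S.-style PSSCCC
# location codes: the leading digit P selects a geographic subdivision and the trailing
# SSCCC digits identify the county itself. Below is a minimal sketch of that lookup;
# the helper name and the sample code are illustrative only, not part of the SAME standard.
def subdivision_name(pssccc):
    """Return the subdivision label for a PSSCCC location code, or None for a whole county."""
    part, county = pssccc[0], pssccc[1:]
    if county in SAME_CTYB:
        return SAME_LOCB.get(part)  # "B" class county: terrain-based subdivisions
    return SAME_LOCA.get(part)      # A, C and D counties: compass-point subdivisions
# e.g. subdivision_name('448245') -> 'West Central' (hypothetical location code)
# SAME__EEE is a list of current and proposed event codes.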
SAME__EEE={ '' : None, 'BZW' : 'Blizzard Warning', 'CFA' : 'Coastal Flood Watch', 'CFW' : 'Coastal Flood Warning', 'DSW' : 'Dust Storm Warning', 'FFA' : 'Flash Flood Watch', 'FFW' : 'Flash Flood Warning', 'FFS' : 'Flash Flood Statement', 'FLA' : 'Flood Watch', 'FLW' : 'Flood Warning', 'FLS' : 'Flood Statement', 'HWA' : 'High Wind Watch', 'HWW' : 'High Wind Warning', 'HUA' : 'Hurricane Watch', 'HUW' : 'Hurricane Warning', 'SVA' : 'Severe Thunderstorm Watch', 'SVR' : 'Severe Thunderstorm Warning', 'SVS' : 'Severe Weather Statement', 'SMW' : 'Special Marine Warning', 'SPS' : 'Special Weather Statement', 'TOA' : 'Tornado Watch', 'TOR' : 'Tornado Warning', 'TRA' : 'Tropical Storm Watch', 'TRW' : 'Tropical Storm Warning', 'TSA' : 'Tsunami Watch', 'TSW' : 'Tsunami Warning', 'WSA' : 'Winter Storm Watch', 'WSW' : 'Winter Storm Warning', 'EAN' : 'Emergency Action Notification', 'EAT' : 'Emergency Action Termination', 'NIC' : 'National Information Center', 'NPT' : 'National Periodic Test', 'NAT' : 'National Audible Test', 'NST' : 'National Silent Test', 'RMT' : 'Required Monthly Test', 'RWT' : 'Required Weekly Test', 'ADR' : 'Administrative Message', 'AVA' : 'Avalanche Watch', 'AVW' : 'Avalanche Warning', 'CAE' : 'Child Abduction Emergency', 'CDW' : 'Civil Danger Warning', 'CEM' : 'Civil Emergency Message', 'EQW' : 'Earthquake Warning', 'EVI' : 'Evacuation Immediate', 'FRW' : 'Fire Warning', 'HMW' : 'Hazardous Materials Warning', 'LEW' : 'Law Enforcement Warning', 'LAE' : 'Local Area Emergency', 'TOE' : '911 Outage Emergency', 'NUW' : 'Nuclear Plant Warning', 'RHW' : 'Radiological Hazard Warning', 'SPW' : 'Shelter in Place Warning', 'VOW' : 'Volcano Warning', 'NMN' : 'Network Message Notification', 'DMO' : 'Demo Warning', 'EWW' : 'Extreme Wind Warning', 'SSA' : 'Storm Surge Watch', 'SSW' : 'Storm Surge Warning', 'FSW' : 'Flash Freeze Warning', 'FZW' : 'Freeze Warning', 'HLS' : 'Hurricane Local Statement', 'BHW' : 'Biological Hazard Warning', 'BWW' : 'Boil Water Warning', 'CHW' : 'Chemical Hazard Warning', 'CWW' : 'Contaminated Water Warning', 'DBA' : 'Dam Watch', 'DBW' : 'Dam Break Warning', 'DEW' : 'Contagious Disease Warning', 'EVA' : 'Evacuation Watch', 'FCW' : 'Food Contamination Warning', 'IBW' : 'Iceberg Warning', 'IFW' : 'Industrial Fire Warning', 'LSW' : 'Land Slide Warning', 'POS' : 'Power Outage Statement', 'WFA' : 'Wild Fire Watch', 'WFW' : 'Wild Fire Warning', }
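# SAME_UEEE maps the significance letter that ends an event code, which lets a decoder
# still label events that are missing from SAME__EEE. Below is a minimal sketch of that
# fallback, assuming three-letter EEE codes; the helper name is illustrative and not
# part of the original tables.
def event_name(eee):
    """Return a human-readable name for a SAME event code."""
    name = SAME__EEE.get(eee)
    if name:
        return name
    # Unknown code: describe it by the significance of its final letter.
    return 'Unrecognized ' + SAME_UEEE.get(eee[-1:], 'Message')
# e.g. event_name('TOR') -> 'Tornado Warning'
#      event_name('XYW') -> 'Unrecognized Warning' (code absent from SAME__EEE)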
# This list is a NOAA list of meteorological station locations cross-referenced with the FAA list of airport codes and locations.
# Without a definitive list of ICAO codes used with NWS alerts, this is the best available information. No source matches all ICAO codes in use.
ICAO_LIST={ 'LOCATION' : 'US', 'K01R' : 'CLAIBORNE RANGE, LOUISIANA', 'K04V' : 'SAGUACHE, COLORADO', 'K06D' : 'ROLLA, NORTH DAKOTA', 'K07S' : 'BEAUMONT, KANSAS', 'K08D' : 'STANLEY, NORTH DAKOTA', 'K0J4' : 'FLORALA, ALABAMA', 'K0VG' : 'JONESVILLE, VIRGINIA', 'K11J' : 'ONONDAGA, MICHIGAN', 'K11R' : 'BRENHAM, TEXAS', 'K12N' : 'ANDOVER, NEW JERSEY', 'K14Y' : 'LONG PRAIRIE, MINNESOTA', 'K1A5' : 'FRANKLIN, NORTH CAROLINA', 'K1A6' : 'MIDDLESBORO, KENTUCKY', 'K1B7' : 'BOOTHVILLE, LOUISIANA', 'K1F0' : 'ARDMORE, OKLAHOMA', 'K1H2' : 'EFFINGHAM, ILLINOIS', 'K1J1' : 'SAINT SIMON ISLAND, GEORGIA', 'K1K5' : 'ELKHART, KANSAS', 'K1L2' : 'SANTA MONICA, CALIFORNIA', 'K1M4' : 'HALEYVILLE, ALABAMA', 'K1O5' : 'MONTAGUE, CALIFORNIA', 'K1P1' : 'PLYMOUTH, NEW HAMPSHIRE', 'K1V1' : 'RIFLE, COLORADO', 'K1V4' : 'ST JOHNSBURY, VERMONT', 'K20C' : 'SAINT JOSEPH, MICHIGAN', 'K20U' : 'BEACH, NORTH DAKOTA', 'K20V' : 'KREMMLING, COLORADO', 'K21D' : 'LAKE ELMO, MINNESOTA', 'K27A' : 'ELBERT, GEORGIA', 'K29G' : 'RAVENNA, OHIO', 'K2C8' : 'CAVALIER, NORTH DAKOTA', 'K2D5' : 'OAKES, NORTH DAKOTA', 'K2DP' : 'ENGELHARD, NORTH CAROLINA', 'K2G6' : 'MEADVILLE, PENNSYLVANIA', 'K2S9' : 'WILLAPA HARBOR, WASHINGTON', 'K2U7' : 'STANLEY, IDAHO', 'K33G' : 'PORT HURON, MICHIGAN', 'K36U' : 'HEBER CITY, UTAH', 'K3A1' : 'CULLMAN, ALABAMA', 'K3A6' : 'STEVENSON RANCH, CALIFORNIA', 'K3B1' : 'GREENVILLE, MAINE', 'K3I2' : 'POINT PLEASANT, WEST VIRGINIA', 'K3J7' : 'GREENSBORO, GEORGIA', 'K3LF' : 'LITCHFIELD, ILLINOIS', 'K3OI' : 'LAMONI, IOWA', 'K3R1' : 'BAY CITY, TEXAS', 'K3S2' : 'AURORA STATE, OREGON', 'K3T5' : 'LA GRANGE, TEXAS', 'K40B' : 'CLAYTON LAKE, MAINE', 'K40G' : 'VALLE, ARIZONA', 'K40J' : 'PERRY, FLORIDA', 'K41G' : 'BATH, MICHIGAN', 'K44N' : 'MILLBROOK, NEW YORK', 'K46D' : 'CARRINGTON, NORTH DAKOTA', 'K47A' : 'CANTON, GEORGIA', 'K48I' : 'SUTTON, WEST VIRGINIA', 'K49N' : 'EAST MORICHES, NEW YORK', 'K4A9' : 'FORT PAYNE, ALABAMA', 'K4O4' : 'IDABEL, OKLAHOMA', 'K4SL' : 'STAR LAKE JOHNSON RANCH, NEW MEXICO', 'K5H4' : 'HARVEY, NORTH DAKOTA', 'K5T6' : 'SANTA TERESA, NEW MEXICO', 'K6R6' : 'DRYDEN, TEXAS', 'K75S' : 'FREDONIA, WASHINGTON', 'K76S' : 'PENN COVE PARK, WASHINGTON', 'K79J' : 'ANDALUSIA, ALABAMA', 'K7A9' : 'PLAINS, GEORGIA', 'K7L2' : 'LINTON, NORTH DAKOTA', 'K8A0' : 'ALBERTVILLE, ALABAMA', 'K8D3' : 'SISSETON, SOUTH DAKOTA', 'K8S0' : 'KIOWA, MONTANA', 'K96D' : 'WALHALLA, NORTH DAKOTA', 'K96S' : 'DUNGENESS, WASHINGTON', 'K9BB' : 'WELLS, NEVADA', 'K9D7' : 'CANDO, NORTH DAKOTA', 'K9L2' : 'EDWARDS AFB, CALIFORNIA', 'K9V9' : 'CHAMBERLAIN, SOUTH DAKOTA', 'KAAA' : 'LINCOLN, ILLINOIS', 'KAAF' : 'APALACHICOLA, FLORIDA', 'KAAO' : 'WICHITA, KANSAS', 'KAAT' : 'ALTURAS, CALIFORNIA', 'KABE' : 'ALLENTOWN, PENNSYLVANIA', 'KABI' : 'ABILENE, TEXAS', 'KABQ' : 'ALBUQUERQUE, NEW MEXICO', 'KABR' : 'ABERDEEN, SOUTH DAKOTA', 'KABX' : 'ALBUQUERQUE, NEW MEXICO', 'KABY' : 'ALBANY, GEORGIA', 'KACB' : 'BELLAIRE, MICHIGAN', 'KACK' : 'NANTUCKET, MASSACHUSETTS', 'KACP' : 'OAKDALE, LOUISIANA', 'KACQ' : 'WASECA, MINNESOTA', 'KACT' : 'WACO, TEXAS', 'KACV' : 'ARCATA, CALIFORNIA', 'KACY' : 'ATLANTIC CITY, NEW JERSEY', 'KADC' : 'WADENA, MINNESOTA', 'KADG' : 'ADRIAN, MICHIGAN', 'KADH' : 'ADA, OKLAHOMA', 'KADM' : 'ARDMORE, OKLAHOMA', 'KADS' : 'DALLAS, TEXAS', 'KADU' : 'AUDUBON, IOWA', 'KADW' : 'CAMP SPRINGS, MARYLAND', 'KAEG' : 'ALBUQUERQUE, NEW MEXICO', 'KAEJ' : 'BUENA VISTA, COLORADO', 'KAEL' : 'ALBERT LEA, MINNESOTA', 'KAEX' : 'ALEXANDRIA, LOUISIANA', 'KAFF' : 'COLORADO SPRINGS, COLORADO', 'KAFJ' : 'WASHINGTON, PENNSYLVANIA', 'KAFK' : 'NEBRASKA CITY, NEBRASKA', 'KAFN' : 'JAFFREY, NEW HAMPSHIRE', 'KAFP' : 'WADESBORO, NORTH CAROLINA', 'KAFW' : 'FORT WORTH, TEXAS', 'KAGC' : 'PITTSBURGH, PENNSYLVANIA', 'KAGR' : 'AVON PARK, FLORIDA', 'KAGS' : 'AUGUSTA, GEORGIA', 'KAHN' : 'ATHENS, GEORGIA', 'KAIA' : 'ALLIANCE, NEBRASKA', 'KAID' : 'ANDERSON, INDIANA', 'KAIG' : 'ANTIGO, WISCONSIN', 'KAIO' : 'ATLANTIC, IOWA', 'KAIT' : 'AITKIN, MINNESOTA', 'KAIZ' : 'LAKE OZARK, MISSOURI', 'KAJG' : 'MOUNT CARMEL, ILLINOIS', 'KAKH' : 'GASTONIA, NORTH CAROLINA', 'KAKO' : 'AKRON, COLORADO', 'KAKQ' : 'WAKEFIELD, VIRGINIA', 'KAKR' : 'AKRON, OHIO', 'KALB' : 'ALBANY, NEW YORK', 'KALI' : 'ALICE, TEXAS', 'KALM' : 'ALAMOGORDO, NEW MEXICO', 'KALN' : 'ALTON, ILLINOIS', 'KALO' : 'WATERLOO, IOWA', 'KALS' : 'ALAMOSA, COLORADO', 'KALW' : 'WALLA WALLA, WASHINGTON', 'KALX' : 'ALEXANDER CITY, ALABAMA', 'KAMA' : 'AMARILLO, TEXAS', 'KAMG' : 'ALMA, GEORGIA', 'KAMN' : 'ALMA, MICHIGAN', 'KAMW' : 'AMES, IOWA', 'KANB' : 'ANNISTON, ALABAMA', 'KAND' : 'ANDERSON, SOUTH CAROLINA', 'KANE' : 'MINNEAPOLIS, MINNESOTA', 'KANJ' : 'SAULT STE MARIE, MICHIGAN', 'KANW' : 'AINSWORTH, NEBRASKA', 'KAOH' : 'LIMA, OHIO', 'KAOO' : 'ALTOONA, PENNSYLVANIA', 'KAPA' : 'DENVER, COLORADO', 'KAPC' : 'NAPA, CALIFORNIA', 'KAPF' : 'NAPLES, FLORIDA', 'KAPG' : 'ABERDEEN PROVING GROUND, MARYLAND', 'KAPN' : 'ALPENA, MICHIGAN', 'KAPV' : 'APPLE VALLEY, CALIFORNIA', 'KAQO' : 'LLANO, TEXAS', 'KAQP' : 'APPLETON, MINNESOTA', 'KAQR' : 'ATOKA, OKLAHOMA', 'KAQV' : 'FORT POLK, LOUISIANA', 'KAQW' : 'NORTH ADAMS, MASSACHUSETTS', 'KARA' : 'NEW IBERIA, LOUISIANA', 'KARB' : 'ANN ARBOR, MICHIGAN', 'KARG' : 'WALNUT RIDGE, ARKANSAS', 'KARM' : 'WHARTON, TEXAS', 'KARR' : 'CHICAGO, ILLINOIS', 'KART' : 'WATERTOWN, NEW YORK', 'KARV' : 'MINOCQUA, WISCONSIN', 'KASD' : 'SLIDELL, LOUISIANA', 'KASE' : 'ASPEN, COLORADO', 'KASG' : 'SPRINGDALE, ARKANSAS', 'KASH' : 'NASHUA, NEW HAMPSHIRE', 'KASJ' : 'AHOSKIE, NORTH CAROLINA', 'KAST' : 'ASTORIA, OREGON', 'KASW' : 'WARSAW, INDIANA', 'KASX' : 'ASHLAND, WISCONSIN', 'KATL' : 'ATLANTA, GEORGIA', 'KATS' : 'ARTESIA, NEW MEXICO', 'KATT' : 'AUSTIN, TEXAS', 'KATW' : 'APPLETON, WISCONSIN', 'KATY' : 'WATERTOWN, SOUTH DAKOTA', 'KAUG' : 'AUGUSTA, MAINE', 'KAUH' : 'AURORA, NEBRASKA', 'KAUM' : 'AUSTIN, MINNESOTA', 'KAUN' : 'AUBURN, CALIFORNIA', 'KAUO' : 'AUBURN, ALABAMA', 'KAUS' : 'AUSTIN, TEXAS', 'KAUW' : 'WAUSAU, WISCONSIN', 'KAVC' : 'SOUTH HILL, VIRGINIA', 'KAVK' : 'ALVA, OKLAHOMA', 'KAVL' : 'ASHEVILLE, NORTH CAROLINA', 'KAVP' : 'SCRANTON, PENNSYLVANIA', 'KAVX' : 'AVALON, CALIFORNIA', 'KAWG' : 'WASHINGTON, IOWA', 'KAWM' : 'WEST MEMPHIS, ARKANSAS', 'KAWO' : 'ARLINGTON, WASHINGTON', 'KAXA' : 'ALGONA, IOWA', 'KAXN' : 'ALEXANDRIA, MINNESOTA', 'KAXS' : 'ALTUS, OKLAHOMA', 'KAYE' : 'AYER, MASSACHUSETTS', 'KAYS' : 'WAYCROSS, GEORGIA', 'KAZC' : 'COLORADO CITY, ARIZONA', 'KAZO' : 'KALAMAZOO, MICHIGAN', 'KBAB' : 'MARYSVILLE, CALIFORNIA', 'KBAC' : 'VALLEY CITY, NORTH DAKOTA', 'KBAD' : 'SHREVEPORT, LOUISIANA', 'KBAF' : 'WESTFIELD, MASSACHUSETTS', 'KBAK' : 'COLUMBUS, INDIANA', 'KBAN' : 'BRIDGEPORT, CALIFORNIA', 'KBAX' : 'BAD AXE, MICHIGAN', 'KBAZ' : 'NEW BRAUNFELS, TEXAS', 'KBBB' : 'BENSON, MINNESOTA', 'KBBD' : 'BRADY, TEXAS', 'KBBW' : 'BROKEN BOW, NEBRASKA', 'KBCB' : 'BLACKSBURG, VIRGINIA', 'KBCE' : 'BRYCE CANYON, UTAH', 'KBCT' : 'BOCA RATON, FLORIDA', 'KBDE' : 'BAUDETTE, MINNESOTA', 'KBDL' : 'WINDSOR LOCKS, CONNECTICUT', 'KBDR' : 'BRIDGEPORT, CONNECTICUT', 'KBED' : 'BEDFORD, MASSACHUSETTS', 'KBEH' : 'BENTON HARBOR, MICHIGAN', 'KBFD' : 'BRADFORD, PENNSYLVANIA', 'KBFF' : 'SCOTTSBLUFF, NEBRASKA', 'KBFI' : 'SEATTLE, WASHINGTON', 'KBFL' : 'BAKERSFIELD, CALIFORNIA', 
'KBFM' : 'MOBILE, ALABAMA', 'KBFW' : 'SILVER BAY, MINNESOTA', 'KBGD' : 'BORGER, TEXAS', 'KBGE' : 'BAINBRIDGE, GEORGIA', 'KBGM' : 'BINGHAMTON, NEW YORK', 'KBGR' : 'BANGOR, MAINE', 'KBHB' : 'BAR HARBOR, MAINE', 'KBHK' : 'BAKER, MONTANA', 'KBHM' : 'BIRMINGHAM, ALABAMA', 'KBID' : 'BLOCK ISLAND, RHODE ISLAND', 'KBIE' : 'BEATRICE, NEBRASKA', 'KBIF' : 'EL PASO, TEXAS', 'KBIH' : 'BISHOP, CALIFORNIA', 'KBIL' : 'BILLINGS, MONTANA', 'KBIS' : 'BISMARCK, NORTH DAKOTA', 'KBIV' : 'HOLLAND, MICHIGAN', 'KBIX' : 'BILOXI, MISSISSIPPI', 'KBJC' : 'DENVER, COLORADO', 'KBJI' : 'BEMIDJI, MINNESOTA', 'KBJJ' : 'WOOSTER, OHIO', 'KBJN' : 'TONOPAH TEST RANGE, NEVADA', 'KBKB' : 'FORT POLK, LOUISANA', 'KBKE' : 'BAKER CITY, OREGON', 'KBKF' : 'DENVER, COLORADO', 'KBKL' : 'CLEVELAND, OHIO', 'KBKS' : 'FALFURRIAS, TEXAS', 'KBKT' : 'BLACKSTONE, VIRGINIA', 'KBKV' : 'BROOKSVILLE, FLORIDA', 'KBKW' : 'BECKLEY, WEST VIRGINIA', 'KBKX' : 'BROOKINGS, SOUTH DAKOTA', 'KBLF' : 'BLUEFIELD, WEST VIRGINIA', 'KBLH' : 'BLYTHE, CALIFORNIA', 'KBLI' : 'BELLINGHAM, WASHINGTON', 'KBLM' : 'BELMAR, NEW JERSEY', 'KBLU' : 'EMIGRANT GAP, CALIFORNIA', 'KBLV' : 'BELLEVILLE, ILLINOIS', 'KBMG' : 'BLOOMINGTON, INDIANA', 'KBMI' : 'BLOOMINGTON-NORMAL, ILLINOIS', 'KBML' : 'BERLIN, NEW HAMPSHIRE', 'KBMQ' : 'BURNET, TEXAS', 'KBMX' : 'BIRMINGHAM, ALABAMA', 'KBNA' : 'NASHVILLE, TENNESSEE', 'KBNO' : 'BURNS, OREGON', 'KBNW' : 'BOONE, IOWA', 'KBOI' : 'BOISE, IDAHOAHO', 'KBOK' : 'BROOKINGS, OREGON', 'KBOS' : 'BOSTON, MASSACHUSETTS', 'KBOW' : 'BARTOW, FLORIDA', 'KBOX' : 'BOSTON, MASSACHUSETTS', 'KBPG' : 'BIG SPRING, TEXAS', 'KBPI' : 'BIG PINEY, WYOMING', 'KBPK' : 'MOUNTAIN HOME, ARKANSAS', 'KBPP' : 'BOWMAN, NORTH DAKOTA', 'KBPT' : 'BEAUMONT, TEXAS', 'KBQK' : 'BRUNSWICK, GEORGIA', 'KBRD' : 'BRAINERD, MINNESOTA', 'KBRL' : 'BURLINGTON, IOWA', 'KBRO' : 'BROWNSVILLE, TEXAS', 'KBRX' : 'BORDEAUX, WYOMING', 'KBTL' : 'BATTLE CREEK, MICHIGAN', 'KBTM' : 'BUTTE, MONTANA', 'KBTP' : 'BUTLER, PENNSYLVANIA', 'KBTR' : 'BATON ROUGE, LOUISIANA', 'KBTV' : 'BURLINGTON, VERMONT', 'KBUF' : 'BUFFALO, NEW YORK', 'KBUR' : 'BURBANK, CALIFORNIA', 'KBUU' : 'BURLINGTON, WISCONSIN', 'KBUY' : 'BURLINGTON, NORTH CAROLINA', 'KBVE' : 'BOOTHVILLE, LOUISANA', 'KBVI' : 'BEAVER FALLS, PENNSYLVANIA', 'KBVN' : 'ALBION, NEBRASKA', 'KBVO' : 'BARTLESVILLE, OKLAHOMA', 'KBVS' : 'BURLINGTON, WASHINGTON', 'KBVX' : 'BATESVILLE, ARKANSAS', 'KBVY' : 'BEVERLY, MASSACHUSETTS', 'KBWD' : 'BROWNWOOD, TEXAS', 'KBWG' : 'BOWLING GREEN, KENTUCKY', 'KBWI' : 'BALTIMORE, MARYLAND', 'KBWP' : 'WAHPETON, NORTH DAKOTA', 'KBXA' : 'BOGALUSA, LOUISIANA', 'KBYG' : 'BUFFALO, WYOMING', 'KBYH' : 'BLYTHEVILLE, ARKANSAS', 'KBYI' : 'BURLEY, IDAHOAHO', 'KBYS' : 'FORT IRWIN, CALIFORNIA', 'KBYY' : 'BAY CITY, TEXAS', 'KBZN' : 'BOZEMAN, MONTANA', 'KC09' : 'SARATOGA, ILLINOIS', 'KC73' : 'DIXON, ILLINOIS', 'KC75' : 'SPARLAND, ILLINOIS', 'KCAD' : 'CADILLAC, MICHIGAN', 'KCAE' : 'COLUMBIA, SOUTH CAROLINA', 'KCAG' : 'CRAIG, COLORADO', 'KCAK' : 'AKRON, OHIO', 'KCAO' : 'CLAYTON, NEW MEXICO', 'KCAR' : 'CARIBOU, MAINE', 'KCAV' : 'CLARION, IOWA', 'KCBE' : 'CUMBERLAND, MARYLAND', 'KCBF' : 'COUNCIL BLUFFS, IOWA', 'KCBG' : 'CAMBRIDGE, MINNESOTA', 'KCBM' : 'COLUMBUS, MISSISSIPPI', 'KCCO' : 'ATLANTA, GEORGIA', 'KCCR' : 'CONCORD, CALIFORNIA', 'KCCU' : 'COPPER MOUNTAIN, COLORADO', 'KCCY' : 'CHARLES CITY, IOWA', 'KCDC' : 'CEDAR CITY, UTAH', 'KCDD' : 'CRANE LAKE, MINNESOTA', 'KCDH' : 'CAMDEN, ARKANSAS', 'KCDJ' : 'CHILLICOTHE, MISSOURI', 'KCDR' : 'CHADRON, NEBRASKA', 'KCDS' : 'CHILDRESS, TEXAS', 'KCDW' : 'CALDWELL, NEW JERSEY', 'KCEC' : 'CRESCENT CITY, CALIFORNIA', 
'KCEF' : 'SPRINGFIELD, MASSACHUSETTS', 'KCEU' : 'CLEMSON, SOUTH CAROLINA', 'KCEW' : 'CRESTVIEW, FLORIDA', 'KCEZ' : 'CORTEZ, COLORADO', 'KCFE' : 'BUFFALO, MINNESOTA', 'KCFS' : 'CARO, MICHIGAN', 'KCFT' : 'CLIFTON, ARIZONA', 'KCFV' : 'COFFEYVILLE, KANSAS', 'KCGF' : 'CLEVELAND, OHIO', 'KCGI' : 'CAPE GIRARDEAU, MISSOURI', 'KCGX' : 'CHICAGO, ILLINOIS', 'KCGZ' : 'CASA GRANDE, ARIZONA', 'KCHA' : 'CHATTANOOGA, TENNESSEE', 'KCHD' : 'CHANDLER, ARIZONA', 'KCHI' : 'CHICAGO, ILLINOIS', 'KCHK' : 'CHICKASHA, OKLAHOMA', 'KCHO' : 'CHARLOTTESVILLE, VIRGINIA', 'KCHS' : 'CHARLESTON, SOUTH CAROLINA', 'KCIC' : 'CHICO, CALIFORNIA', 'KCID' : 'CEDAR RAPIDS, IOWA', 'KCIN' : 'CARROLL, IOWA', 'KCIR' : 'CAIRO, ILLINOIS', 'KCIU' : 'SAULT STE MARIE, MICHIGAN', 'KCJR' : 'CULPEPER, VIRGINIA', 'KCKB' : 'CLARKSBURG, WEST VIRGINIA', 'KCKC' : 'GRAND MARAIS, MINNESOTA', 'KCKN' : 'CROOKSTON, MINNESOTA', 'KCKP' : 'CHEROKEE, IOWA', 'KCKV' : 'CLARKSVILLE, TENNESSEE', 'KCLE' : 'CLEVELAND, OHIO', 'KCLI' : 'CLINTONVILLE, WISCONSIN', 'KCLK' : 'CLINTON, OKLAHOMA', 'KCLL' : 'COLLEGE STATION, TEXAS', 'KCLM' : 'PORT ANGELES, WASHINGTON', 'KCLT' : 'CHARLOTTE, NORTH CAROLINA', 'KCMA' : 'CAMARILLO, CALIFORNIA', 'KCMH' : 'COLUMBUS, OHIO', 'KCMI' : 'CHAMPAIGN-URBANA, ILLINOIS', 'KCMX' : 'HANCOCK, MICHIGAN', 'KCMY' : 'SPARTA, WISCONSIN', 'KCNB' : 'CANBY, MINNESOTA', 'KCNC' : 'CHARITON, IOWA', 'KCNK' : 'CONCORDIA, KANSAS', 'KCNM' : 'CARLSBAD, NEW MEXICO', 'KCNO' : 'CHINO, CALIFORNIA', 'KCNU' : 'CHANUTE, KANSAS', 'KCNY' : 'MOAB, UTAH', 'KCOD' : 'CODY, WYOMING', 'KCOE' : 'COEUR D\'ALENE, IDAHO', 'KCOF' : 'COCOA BEACH, FLORIDA', 'KCOI' : 'MERRITT ISLAND, FLORIDA', 'KCON' : 'CONCORD, NEW HAMPSHIRE', 'KCOQ' : 'CLOQUET, MINNESOTA', 'KCOS' : 'COLORADO SPRINGS, COLORADO', 'KCOT' : 'COTULLA, TEXAS', 'KCOU' : 'COLUMBIA, MISSOURI', 'KCPC' : 'WHITEVILLE, NORTH CAROLINA', 'KCPK' : 'NORFOLK, VIRGINIA', 'KCPR' : 'CASPER, WYOMING', 'KCPS' : 'CAHOKIA, ILLINOIS', 'KCPT' : 'CLEBURNE, TEXAS', 'KCPW' : 'WOLF CREEK PASS, COLORADO', 'KCQB' : 'CHANDLER, OKLAHOMA', 'KCQC' : 'CLINES CORNER, NEW MEXICO', 'KCQM' : 'COOK, MINNESOTA', 'KCQT' : 'LOS ANGELES, CALIFORNIA', 'KCQX' : 'CHATHAM, MASSACHUSETTS', 'KCRE' : 'NORTH MYRTLE BEACH, SOUTH CAROLINA', 'KCRG' : 'JACKSONVILLE, FLORIDA', 'KCRP' : 'CORPUS CHRISTI, TEXAS', 'KCRQ' : 'CARLSBAD, CALIFORNIA', 'KCRS' : 'CORSICANA, TEXAS', 'KCRW' : 'CHARLESTON, WEST VIRGINIA', 'KCSG' : 'COLUMBUS, GEORGIA', 'KCSL' : 'SAN LUIS OBISPO, CALIFORNIA', 'KCSM' : 'CLINTON, OKLAHOMA', 'KCSQ' : 'CRESTON, IOWA', 'KCSV' : 'CROSSVILLE, TENNESSEE', 'KCTB' : 'CUT BANK, MONTANA', 'KCTY' : 'CROSS CITY, FLORIDA', 'KCTZ' : 'CLINTON, NORTH CAROLINA', 'KCUB' : 'COLUMBIA, SOUTH CAROLINA', 'KCUH' : 'CUSHING, OKLAHOMA', 'KCUL' : 'CARMI, ILLINOIS', 'KCUT' : 'CUSTER, SOUTH DAKOTA', 'KCVG' : 'COVINGTON, KENTUCKY', 'KCVN' : 'CLOVIS, NEW MEXICO', 'KCVO' : 'CORVALLIS, OREGON', 'KCVS' : 'CLOVIS, NEW MEXICO', 'KCVX' : 'CHARLEVOIX, MICHIGAN', 'KCWA' : 'MOSINEE, WISCONSIN', 'KCWF' : 'LAKE CHARLES, LOUISIANA', 'KCWI' : 'CLINTON, IOWA', 'KCXO' : 'HOUSTON, TEXAS', 'KCXP' : 'CARSON CITY, NEVADA', 'KCXY' : 'HARRISBURG, PENNSYLVANIA', 'KCYS' : 'CHEYENNE, WYOMING', 'KCZD' : 'COZAD, NEBRASKA', 'KCZK' : 'CASCADE LOCKS, OREGON', 'KCZZ' : 'CAMPO, CALIFORNIA', 'KD07' : 'FAITH, SOUTH DAKOTA', 'KD50' : 'CROSBY, NORTH DAKOTA', 'KD55' : 'LANGDON, NORTH DAKOTA', 'KD60' : 'TIOGA, NORTH DAKOTA', 'KDAA' : 'FORT BELVOIR, VIRGINIA', 'KDAB' : 'DAYTONA BEACH, FLORIDA', 'KDAG' : 'DAGGETT, CALIFORNIA', 'KDAL' : 'DALLAS, TEXAS', 'KDAN' : 'DANVILLE, VIRGINIA', 'KDAW' : 'ROCHESTER, NEW HAMPSHIRE', 
'KDAY' : 'DAYTON, OHIO', 'KDBN' : 'DUBLIN, GEORGIA', 'KDBQ' : 'DUBUQUE, IOWA', 'KDCA' : 'WASHINGTON, DC', 'KDCU' : 'DECATUR, ALABAMA', 'KDDC' : 'DODGE CITY, KANSAS', 'KDDH' : 'BENNINGTON, VERMONT', 'KDEC' : 'DECATUR, ILLINOIS', 'KDEH' : 'DECORAH, IOWA', 'KDEN' : 'DENVER, COLORADO', 'KDEQ' : 'DE QUEEN, ARKANSAS', 'KDET' : 'DETROIT, MICHIGAN', 'KDEW' : 'DEER PARK, WASHINGTON', 'KDFI' : 'DEFIANCE, OHIO', 'KDFW' : 'DALLAS-FORT WORTH, TEXAS', 'KDGW' : 'DOUGLAS, WYOMING', 'KDHN' : 'DOTHAN, ALABAMA', 'KDHT' : 'DALHART, TEXAS', 'KDIK' : 'DICKINSON, NORTH DAKOTA', 'KDKB' : 'DE KALB, ILLINOIS', 'KDKK' : 'DUNKIRK, NEW YORK', 'KDKR' : 'CROCKETT, TEXAS', 'KDKX' : 'KNOXVILLE, TENNESSEE', 'KDLF' : 'DEL RIO, TEXAS', 'KDLH' : 'DULUTH, MINNESOTA', 'KDLL' : 'BARABOO, WISCONSIN', 'KDLN' : 'DILLON, MONTANA', 'KDLS' : 'THE DALLES, OREGON', 'KDMA' : 'TUCSON, ARIZONA', 'KDMH' : 'BALTIMORE, MARYLAND', 'KDMN' : 'DEMING, NEW MEXICO', 'KDMO' : 'SEDALIA, MISSOURI', 'KDNK' : 'POLK AAF, LOUISANA', 'KDNL' : 'AUGUSTA, GEORGIA', 'KDNN' : 'DALTON, GEORGIA', 'KDNR' : 'DENVER, COLORADO', 'KDNS' : 'DENISON, IOWA', 'KDNV' : 'DANVILLE, ILLINOIS', 'KDOV' : 'DOVER, DELAWARE', 'KDPA' : 'CHICAGO, ILLINOIS', 'KDPG' : 'DUGWAY, UTAH', 'KDPL' : 'KENANSVILLE, NORTH CAROLINA', 'KDQH' : 'DOUGLAS, GEORGIA', 'KDRA' : 'MERCURY, NEVADA', 'KDRI' : 'DE RIDDER, LOUISIANA', 'KDRO' : 'DURANGO, COLORADO', 'KDRT' : 'DEL RIO, TEXAS', 'KDSM' : 'DES MOINES, IOWA', 'KDSV' : 'DANSVILLE, NEW YORK', 'KDTL' : 'DETROIT LAKES, MINNESOTA', 'KDTN' : 'SHREVEPORT, LOUISIANA', 'KDTO' : 'DENTON, TEXAS', 'KDTS' : 'DESTIN, FLORIDA', 'KDTW' : 'DETROIT, MICHIGAN', 'KDTX' : 'DETROIT, MICHIGAN', 'KDUA' : 'DURANT, OKLAHOMA', 'KDUC' : 'DUNCAN, OKLAHOMA', 'KDUG' : 'DOUGLAS BISBEE, ARIZONA', 'KDUH' : 'LAMBERTVILLE, MICHIGAN', 'KDUJ' : 'DUBOIS, PENNSYLVANIA', 'KDUX' : 'DUMAS, TEXAS', 'KDVL' : 'DEVILS LAKE, NORTH DAKOTA', 'KDVN' : 'DAVENPORT, IOWA', 'KDVP' : 'SLAYTON, MINNESOTA', 'KDVT' : 'PHOENIX, ARIZONA', 'KDWH' : 'HOUSTON, TEXAS', 'KDXR' : 'DANBURY, CONNECTICUT', 'KDXX' : 'MADISON, MINNESOTA', 'KDYL' : 'DOYLESTOWN, PENNSYLVANIA', 'KDYR' : 'DYERSBURG, TENNESSEE', 'KDYS' : 'ABILENE, TEXAS', 'KDYT' : 'DULUTH, MINNESOTA', 'KE24' : 'FT APACHE, ARIZONA', 'KE33' : 'CHAMA, NEW MEXICO', 'KE38' : 'ALPINE, TEXAS', 'KE80' : 'BELEN, NEW MEXICO', 'KEAR' : 'KEARNEY, NEBRASKA', 'KEAT' : 'WENATCHEE, WASHINGTON', 'KEAU' : 'EAU CLAIRE, WISCONSIN', 'KEAX' : 'PLEASANT HILL, MISSOURI', 'KEBG' : 'EDINBURG, TEXAS', 'KEBS' : 'WEBSTER CITY, IOWA', 'KECG' : 'ELIZABETH CITY, NORTH CAROLINA', 'KECP' : 'PANAMA CITY, FLORIDA', 'KECU' : 'ROCKSPRINGS, TEXAS', 'KEDE' : 'EDENTON, NORTH CAROLINA', 'KEDW' : 'EDWARDS AFB, CALIFORNIA', 'KEED' : 'NEEDLES, CALIFORNIA', 'KEEN' : 'KEENE, NEW HAMPSHIRE', 'KEEO' : 'MEEKER, COLORADO', 'KEET' : 'ALABASTER, ALABAMA', 'KEFC' : 'BELLE FOURCHE, SOUTH DAKOTA', 'KEFD' : 'HOUSTON, TEXAS', 'KEFT' : 'MONROE, WISCONSIN', 'KEGE' : 'EAGLE, COLORADO', 'KEGI' : 'DUKE FLD, FLORIDA', 'KEGV' : 'EAGLE RIVER, WISCONSIN', 'KEHA' : 'ELKHART, KANSAS', 'KEHO' : 'SHELBY, NORTH CAROLINA', 'KEHR' : 'HENDERSON, KENTUCKY', 'KEKA' : 'EUREKA, CALIFORNIA', 'KEKM' : 'ELKHART, INDIANA', 'KEKN' : 'ELKINS, WEST VIRGINIA', 'KEKO' : 'ELKO, NEVADA', 'KEKQ' : 'MONTICELLO, KENTUCKY', 'KELD' : 'EL DORADO, ARKANSAS', 'KELM' : 'ELMIRA, NEW YORK', 'KELN' : 'ELLENSBURG, WASHINGTON', 'KELO' : 'ELY, MINNESOTA', 'KELP' : 'EL PASO, TEXAS', 'KELY' : 'ELY, NEVADA', 'KELZ' : 'WELLSVILLE, NEW YORK', 'KEMP' : 'EMPORIA, KANSAS', 'KEMT' : 'EL MONTE, CALIFORNIA', 'KEMV' : 'EMPORIA, VIRGINIA', 'KEND' : 'ENID, OKLAHOMA', 'KENL' 
: 'CENTRALIA, ILLINOIS', 'KENV' : 'WENDOVER, UTAH', 'KENW' : 'KENOSHA, WISCONSIN', 'KEOK' : 'KEOKUK, IOWA', 'KEPH' : 'EPHRATA, WASHINGTON', 'KEPZ' : 'SANTA TERESA, NEW MEXICO', 'KEQY' : 'MONROE, NORTH CAROLINA', 'KERI' : 'ERIE, PENNSYLVANIA', 'KERV' : 'KERRVILLE, TEXAS', 'KERY' : 'NEWBERRY, MICHIGAN', 'KESC' : 'ESCANABA, MICHIGAN', 'KESF' : 'ALEXANDRIA, LOUISIANA', 'KESN' : 'EASTON, MARYLAND', 'KEST' : 'ESTHERVILLE, IOWA', 'KETB' : 'WEST BEND, WISCONSIN', 'KETH' : 'WHEATON, MINNESOTA', 'KEUF' : 'EUFAULA, ALABAMA', 'KEUG' : 'EUGENE, OREGON', 'KEUL' : 'CALDWELL, IDAHOAHO', 'KEVB' : 'NEW SMYRNA BEACH, FLORIDA', 'KEVM' : 'EVELETH, MINNESOTA', 'KEVV' : 'EVANSVILLE, INDIANA', 'KEVW' : 'EVANSTON, WYOMING', 'KEWB' : 'NEW BEDFORD, MASSACHUSETTS', 'KEWK' : 'NEWTON, KANSAS', 'KEWN' : 'NEW BERN, NORTH CAROLINA', 'KEWR' : 'NEWARK, NEW JERSEY', 'KEXX' : 'LEXINGTON, NORTH CAROLINA', 'KEYE' : 'INDIANAPOLIS, INDIANA', 'KEYF' : 'ELIZABETHTOWN, NORTH CAROLINA', 'KEYW' : 'KEY WEST, FLORIDA', 'KEZF' : 'FREDERICKSBURG, VIRGINIA', 'KEZM' : 'EASTMAN, GEORGIA', 'KF05' : 'VERNON, TEXAS', 'KF10' : 'HENRYETTA, OKLAHOMA', 'KF30' : 'SULPHUR, OKLAHOMA', 'KFAF' : 'FORT EUSTIS, VIRGINIA', 'KFAM' : 'FARMINGTON, MISSOURI', 'KFAR' : 'FARGO, NORTH DAKOTA', 'KFAT' : 'FRESNO, CALIFORNIA', 'KFAY' : 'FAYETTEVILLE, NORTH CAROLINA', 'KFBG' : 'FORT BRAGG, NORTH CAROLINA', 'KFBL' : 'FARIBAULT, MINNESOTA', 'KFCH' : 'FRESNO, CALIFORNIA', 'KFCM' : 'MINNEAPOLIS, MINNESOTA', 'KFCS' : 'FORT CARSON, COLORADO', 'KFDK' : 'FREDERICK, MARYLAND', 'KFDR' : 'FREDERICK, OKLAHOMA', 'KFDY' : 'FINDLAY, OHIO', 'KFEP' : 'FREEPORT, ILLINOIS', 'KFET' : 'FREMONT, NEBRASKA', 'KFFA' : 'KILL DEVIL HILLS, NORTH CAROLINA', 'KFFC' : 'ATLANTA, GEORGIA', 'KFFL' : 'FAIRFIELD, IOWA', 'KFFM' : 'FERGUS FALLS, MINNESOTA', 'KFFO' : 'DAYTON, OHIO', 'KFFT' : 'FRANKFORT, KENTUCKY', 'KFFZ' : 'MESA, ARIZONA', 'KFGN' : 'FLAG ISLAND, MINNESOTA', 'KFHR' : 'FRIDAY HARBOR, WASHINGTON', 'KFHU' : 'FORT HUACHUCA SIERRA VISTA, ARIZONA', 'KFIG' : 'CLEARFIELD, PENNSYLVANIA', 'KFIT' : 'FITCHBURG, MASSACHUSETTS', 'KFKA' : 'PRESTON, MINNESOTA', 'KFKL' : 'FRANKLIN, PENNSYLVANIA', 'KFKN' : 'FRANKLIN, VIRGINIA', 'KFKS' : 'FRANKFORT, MICHIGAN', 'KFLD' : 'FOND DU LAC, WISCONSIN', 'KFLG' : 'FLAGSTAFF, ARIZONA', 'KFLL' : 'FORT LAUDERDALE, FLORIDA', 'KFLO' : 'FLORENCE, SOUTH CAROLINA', 'KFLP' : 'FLIPPIN, ARKANSAS', 'KFLV' : 'FORT LEAVENWORTH, KANSAS', 'KFME' : 'FORT MEADE(ODENTON), MARYLAND', 'KFMH' : 'FALMOUTH, MASSACHUSETTS', 'KFMN' : 'FARMINGTON, NEW MEXICO', 'KFMY' : 'FORT MYERS, FLORIDA', 'KFNB' : 'FALLS CITY, NEBRASKA', 'KFNL' : 'FORT COLLINS, COLORADO', 'KFNT' : 'FLINT, MICHIGAN', 'KFOA' : 'FLORA, ILLINOIS', 'KFOD' : 'FORT DODGE, IOWA', 'KFOE' : 'TOPEKA, KANSAS', 'KFOK' : 'WESTHAMPTON BEACH, NEW YORK', 'KFOZ' : 'BIGFORK, MINNESOTA', 'KFPK' : 'CHARLOTTE, MICHIGAN', 'KFPR' : 'FORT PIERCE, FLORIDA', 'KFQD' : 'RUTHERFORDTON, NORTH CAROLINA', 'KFRG' : 'FARMINGDALE, NEW YORK', 'KFRI' : 'FORT RILEY, KANSAS', 'KFRM' : 'FAIRMONT, MINNESOTA', 'KFSD' : 'SIOUX FALLS, SOUTH DAKOTA', 'KFSE' : 'FOSSTON, MINNESOTA', 'KFSI' : 'LAWTON, OKLAHOMA', 'KFSM' : 'FORT SMITH, ARKANSAS', 'KFST' : 'FORT STOCKTON, TEXAS', 'KFSW' : 'FORT MADISON, IOWA', 'KFTG' : 'DENVER, COLORADO', 'KFTK' : 'FORT KNOX, KENTUCKY', 'KFTW' : 'FORT WORTH, TEXAS', 'KFTY' : 'ATLANTA, GEORGIA', 'KFUL' : 'FULLERTON, CALIFORNIA', 'KFVE' : 'FRENCHVILLE, MAINE', 'KFVX' : 'FARMVILLE, VIRGINIA', 'KFWA' : 'FORT WAYNE, INDIANA', 'KFWC' : 'FAIRFIELD, ILLINOIS', 'KFWN' : 'SUSSEX, NEW JERSEY', 'KFWQ' : 'MONONGAHELA, PENNSYLVANIA', 'KFWS' : 'FORT WORTH, 
TEXAS', 'KFXE' : 'FORT LAUDERDALE, FLORIDA', 'KFYJ' : 'WEST POINT, VIRGINIA', 'KFYV' : 'FAYETTEVILLE, ARKANSAS', 'KFZY' : 'FULTON, NEW YORK', 'KGAD' : 'GADSDEN, ALABAMA', 'KGAF' : 'GRAFTON, NORTH DAKOTA', 'KGAG' : 'GAGE, OKLAHOMA', 'KGAO' : 'GALLIANO, LOUISIANA', 'KGBD' : 'GREAT BEND, KANSAS', 'KGBG' : 'GALESBURG, ILLINOIS', 'KGBN' : 'GILA BEND, ARIZONA', 'KGCC' : 'GILLETTE, WYOMING', 'KGCK' : 'GARDEN CITY, KANSAS', 'KGCM' : 'CLAREMORE, OKLAHOMA', 'KGCN' : 'GRAND CANYON, ARIZONA', 'KGDB' : 'GRANITE FALLS, MINNESOTA', 'KGDJ' : 'GRANBURY, TEXAS', 'KGDP' : 'GUADALUPE PASS, TEXAS', 'KGDV' : 'GLENDIVE, MONTANA', 'KGED' : 'GEORGETOWN, DELAWARE', 'KGEG' : 'SPOKANE, WASHINGTON', 'KGEU' : 'GLENDALE, ARIZONA', 'KGEV' : 'JEFFERSON, NORTH CAROLINA', 'KGEY' : 'GREYBULL, WYOMING', 'KGEZ' : 'SHELBYVILLE, INDIANA', 'KGFA' : 'GREAT FALLS, MONTANA', 'KGFK' : 'GRAND FORKS, NORTH DAKOTA', 'KGFL' : 'GLENS FALLS, NEW YORK', 'KGGG' : 'LONGVIEW, TEXAS', 'KGGW' : 'GLASGOW, MONTANA', 'KGHW' : 'GLENWOOD, MINNESOTA', 'KGID' : 'GRAND ISLAND, NEBRASKA', 'KGIF' : 'WINTER HAVEN, FLORIDA', 'KGJT' : 'GRAND JUNCTION, COLORADO', 'KGKJ' : 'MEADVILLE, PENNSYLVANIA', 'KGKY' : 'ARLINGTON, TEXAS', 'KGLD' : 'GOODLAND, KANSAS', 'KGLE' : 'GAINESVILLE, TEXAS', 'KGLH' : 'GREENVILLE, MISSISSIPPI', 'KGLR' : 'GAYLORD, MICHIGAN', 'KGLS' : 'GALVESTON, TEXAS', 'KGLW' : 'GLASGOW, KENTUCKY', 'KGMJ' : 'GROVE, OKLAHOMA', 'KGMU' : 'GREENVILLE, SOUTH CAROLINA', 'KGNA' : 'GRAND MARAIS, MINNESOTA', 'KGNC' : 'SEMINOLE, TEXAS', 'KGNR' : 'GREENVILLE, MAINE', 'KGNT' : 'GRANTS, NEW MEXICO', 'KGNV' : 'GAINESVILLE, FLORIDA', 'KGOK' : 'GUTHRIE, OKLAHOMA', 'KGON' : 'GROTON, CONNECTICUT', 'KGOP' : 'GATESVILLE, TEXAS', 'KGOV' : 'GRAYLING, MICHIGAN', 'KGPI' : 'KALISPELL, MONTANA', 'KGPM' : 'GRAND PRAIRIE, TEXAS', 'KGPT' : 'GULFPORT, MISSISSIPPI', 'KGPZ' : 'GRAND RAPIDS, MINNESOTA', 'KGRB' : 'GREEN BAY, WISCONSIN', 'KGRD' : 'GREENWOOD, SOUTH CAROLINA', 'KGRF' : 'TACOMA, WASHINGTON', 'KGRI' : 'GRAND ISLAND, NEBRASKA', 'KGRK' : 'FORT HOOD, TEXAS', 'KGRN' : 'GORDON, NEBRASKA', 'KGRR' : 'GRAND RAPIDS, MICHIGAN', 'KGSB' : 'GOLDSBORO, NORTH CAROLINA', 'KGSH' : 'GOSHEN, INDIANA', 'KGSO' : 'GREENSBORO, NORTH CAROLINA', 'KGSP' : 'GREER, SOUTH CAROLINA', 'KGTB' : 'FORT DRUM, NEW YORK', 'KGTF' : 'GREAT FALLS, MONTANA', 'KGTR' : 'COLUMBUS, MISSISSIPPI', 'KGTU' : 'GEORGETOWN, TEXAS', 'KGUC' : 'GUNNISON, COLORADO', 'KGUP' : 'GALLUP, NEW MEXICO', 'KGUS' : 'BUNKER HILL, INDIANA', 'KGUY' : 'GUYMON, OKLAHOMA', 'KGVL' : 'GAINESVILLE, GEORGIA', 'KGVT' : 'GREENVILLE, TEXAS', 'KGVW' : 'KANSAS CITY, MISSOURI', 'KGWO' : 'GREENWOOD, MISSISSIPPI', 'KGWR' : 'GWINNER, NORTH DAKOTA', 'KGWW' : 'GOLDSBORO, NORTH CAROLINA', 'KGXY' : 'GREELEY, COLORADO', 'KGYB' : 'GIDDINGS, TEXAS', 'KGYI' : 'SHERMAN, TEXAS', 'KGYL' : 'GLENCOE, MINNESOTA', 'KGYR' : 'GOODYEAR, ARIZONA', 'KGYY' : 'GARY, INDIANA', 'KGZH' : 'EVERGREEN, ALABAMA', 'KH92' : 'HOMINY, OKLAHOMA', 'KHAO' : 'HAMILTON, OHIO', 'KHAT' : 'CAPE HATTERAS, NORTH CAROLINA', 'KHBG' : 'HATTIESBURG, MISSISSIPPI', 'KHBI' : 'ASHEBORO, NORTH CAROLINA', 'KHBR' : 'HOBART, OKLAHOMA', 'KHBV' : 'HEBBRONVILLE, TEXAS', 'KHCD' : 'HUTCHINSON, MINNESOTA', 'KHCO' : 'HALLOCK, MINNESOTA', 'KHDC' : 'HAMMOND, LOUISIANA', 'KHDE' : 'HOLDREGE, NEBRASKA', 'KHDN' : 'HAYDEN, COLORADO', 'KHDO' : 'HONDO, TEXAS', 'KHEF' : 'WASHINGTON, DIST, OF COLUMBIA', 'KHEI' : 'HETTINGER, NORTH DAKOTA', 'KHEY' : 'OZARK, ALABAMA', 'KHEZ' : 'NATCHEZ, MISSISSIPPI', 'KHFD' : 'HARTFORD, CONNECTICUT', 'KHFF' : 'HOFFMAN, NORTH CAROLINA', 'KHGR' : 'HAGERSTOWN, MARYLAND', 'KHGX' : 'HOUSTON, TEXAS', 
'KHHF' : 'CANADIAN, TEXAS', 'KHHR' : 'HAWTHORNE, CALIFORNIA', 'KHIB' : 'HIBBING, MINNESOTA', 'KHIE' : 'WHITEFIELD, NEW HAMPSHIRE', 'KHIF' : 'OGDEN, UTAH', 'KHIO' : 'PORTLAND, OREGON', 'KHJH' : 'HEBRON, NEBRASKA', 'KHJO' : 'HANFORD, CALIFORNIA', 'KHKA' : 'BLYTHEVILLE, ARKANSAS', 'KHKS' : 'JACKSON, MISSISSIPPI', 'KHKY' : 'HICKORY, NORTH CAROLINA', 'KHLC' : 'HILL CITY, KANSAS', 'KHLG' : 'WHEELING, WEST VIRGINIA', 'KHLN' : 'HELENA, MONTANA', 'KHLR' : 'KILLEEN, TEXAS', 'KHLX' : 'GALAX HILLSVILLE, VIRGINIA', 'KHMN' : 'ALAMOGORDO, NEW MEXICO', 'KHMZ' : 'BEDFORD, PENNSYLVANIA', 'KHNB' : 'HUNTINGBURG, INDIANA', 'KHND' : 'LAS VEGAS, NEVADA', 'KHNR' : 'HARLAN, IOWA', 'KHNZ' : 'OXFORD, NORTH CAROLINA', 'KHOB' : 'HOBBS, NEW MEXICO', 'KHON' : 'HURON, SOUTH DAKOTA', 'KHOP' : 'FORT CAMPBELL, KENTUCKY', 'KHOT' : 'HOT SPRINGS, ARKANSAS', 'KHOU' : 'HOUSTON, TEXAS', 'KHPN' : 'WHITE PLAINS, NEW YORK', 'KHQM' : 'HOQUIAM, WASHINGTON', 'KHQU' : 'THOMSON, GEORGIA', 'KHQZ' : 'MESQUITE, TEXAS', 'KHRI' : 'HERMISTON, OREGON', 'KHRJ' : 'ERWIN, NORTH CAROLINA', 'KHRL' : 'HARLINGEN, TEXAS', 'KHRO' : 'HARRISON, ARKANSAS', 'KHRT' : 'MARY ESTHER, FLORIDA', 'KHSA' : 'BAY ST LOUIS, MISSISSIPPI', 'KHSB' : 'HARRISBURG, ILLINOIS', 'KHSE' : 'HATTERAS, NORTH CAROLINA', 'KHSI' : 'HASTINGS, NEBRASKA', 'KHSP' : 'HOT SPRINGS, VIRGINIA', 'KHST' : 'HOMESTEAD, FLORIDA', 'KHSV' : 'HUNTSVILLE, ALABAMA', 'KHTH' : 'HAWTHORNE, NEVADA', 'KHTL' : 'HOUGHTON LAKE, MICHIGAN', 'KHTO' : 'EAST HAMPTON, NEW YORK', 'KHTS' : 'HUNTINGTON, WEST VIRGINIA', 'KHUF' : 'TERRE HAUTE, INDIANA', 'KHUL' : 'HOULTON, MAINE', 'KHUM' : 'HOUMA, LOUISIANA', 'KHUN' : 'HUNTSVILLE, ALABAMA', 'KHUT' : 'HUTCHINSON, KANSAS', 'KHVN' : 'NEW HAVEN, CONNECTICUT', 'KHVR' : 'HAVRE, MONTANA', 'KHWD' : 'HAYWARD, CALIFORNIA', 'KHWO' : 'HOLLYWOOD, FLORIDA', 'KHWV' : 'SHIRLEY, NEW YORK', 'KHXD' : 'HILTON HEAD ISLAND, SOUTH CAROLINA', 'KHYA' : 'HYANNIS, MASSACHUSETTS', 'KHYI' : 'SAN MARCOS, TEXAS', 'KHYR' : 'HAYWARD, WISCONSIN', 'KHYS' : 'HAYS, KANSAS', 'KHYX' : 'SAGINAW, MICHIGAN', 'KHZE' : 'HAZEN, NORTH DAKOTA', 'KHZX' : 'MC GREGOR, MINNESOTA', 'KHZY' : 'ASHTABULA, OHIO', 'KI12' : 'SIDNEY, OHIO', 'KI16' : 'WINDOM, WEST VIRGINIA', 'KI63' : 'MOUNT STERLING, ILLINOIS', 'KIAB' : 'WICHITA, KANSAS', 'KIAD' : 'WASHINGTON, DIST, OF COLUMBIA', 'KIAG' : 'NIAGARA FALLS, NEW YORK', 'KIAH' : 'HOUSTON, TEXAS', 'KIBM' : 'KIMBALL, NEBRASKA', 'KICL' : 'CLARINDA, IOWA', 'KICR' : 'WINNER, SOUTH DAKOTA', 'KICT' : 'WICHITA, KANSAS', 'KIDA' : 'IDAHO FALLS, IDAHOAHO', 'KIDI' : 'INDIANA, PENNSYLVANIA', 'KIEN' : 'PINE RIDGE, SOUTH DAKOTA', 'KIER' : 'NATCHITOCHES, LOUISIANA', 'KIFP' : 'BULLHEAD CITY, ARIZONA', 'KIGM' : 'KINGMAN, ARIZONA', 'KIGQ' : 'CHICAGO, ILLINOIS', 'KIGX' : 'CHAPEL HILL, NORTH CAROLINA', 'KIIB' : 'INDEPENDENCE, IOWA', 'KIIY' : 'WASHINGTON, GEORGIA', 'KIJD' : 'WILLIMANTIC, CONNECTICUT', 'KIJX' : 'JACKSONVILLE, ILLINOIS', 'KIKK' : 'KANKAKEE, ILLINOIS', 'KIKV' : 'ANKENY, IOWA', 'KILE' : 'KILLEEN, TEXAS', 'KILG' : 'WILMINGTON, DELAWARE', 'KILM' : 'WILMINGTON, NORTH CAROLINA', 'KILN' : 'WILMINGTON, OHIO', 'KILX' : 'LINCOLN, ILLINOIS', 'KIML' : 'IMPERIAL, NEBRASKA', 'KIMT' : 'IRON MOUNTAIN KINGSFORD, MICHIGAN', 'KIND' : 'INDIANAPOLIS, INDIANA', 'KINJ' : 'HILLSBORO, TEXAS', 'KINK' : 'WINK, TEXAS', 'KINL' : 'INTERNATIONAL FALLS, MINNESOTA', 'KINS' : 'INDIAN SPRINGS, NEVADA', 'KINT' : 'WINSTON SALEM, NORTH CAROLINA', 'KINW' : 'WINSLOW, ARIZONA', 'KIOW' : 'IOWA CITY, IOWA', 'KIPJ' : 'LINCOLNTON, NORTH CAROLINA', 'KIPL' : 'IMPERIAL, CALIFORNIA', 'KIPT' : 'WILLIAMSPORT, PENNSYLVANIA', 'KIRK' : 
'KIRKSVILLE, MISSOURI', 'KIRS' : 'STURGIS, MICHIGAN', 'KISM' : 'ORLANDO, FLORIDA', 'KISN' : 'WILLISTON, NORTH DAKOTA', 'KISO' : 'KINSTON, NORTH CAROLINA', 'KISP' : 'NEW YORK, NEW YORK', 'KISQ' : 'MANISTIQUE, MICHIGAN', 'KISW' : 'WISCONSIN RAPIDS, WISCONSIN', 'KITH' : 'ITHACA, NEW YORK', 'KITR' : 'BURLINGTON, COLORADO', 'KIWA' : 'PHOENIX, ARIZONA', 'KIWD' : 'IRONWOOD, MICHIGAN', 'KIWI' : 'WISCASSET, MAINE', 'KIWS' : 'HOUSTON, TEXAS', 'KIXD' : 'OLATHE, KANSAS', 'KIYK' : 'INYOKERN, CALIFORNIA', 'KIZA' : 'SANTA YNEZ, CALIFORNIA', 'KIZG' : 'FRYEBURG, MAINE', 'KJAC' : 'JACKSON, WYOMING', 'KJAN' : 'JACKSON, MISSISSIPPI', 'KJAS' : 'JASPER, TEXAS', 'KJAX' : 'JACKSONVILLE, FLORIDA', 'KJBR' : 'JONESBORO, ARKANSAS', 'KJCT' : 'JUNCTION, TEXAS', 'KJDD' : 'MINEOLA, TEXAS', 'KJDN' : 'JORDAN, MONTANA', 'KJEF' : 'JEFFERSON CITY, MISSOURI', 'KJER' : 'JEROME, IDAHOAHO', 'KJES' : 'JESUP, GEORGIA', 'KJFK' : 'NEW YORK, NEW YORK', 'KJGG' : 'WILLIAMSBURG, VIRGINIA', 'KJHW' : 'JAMESTOWN, NEW YORK', 'KJKJ' : 'MOORHEAD, MINNESOTA', 'KJKL' : 'JACKSON, KENTUCKY', 'KJLN' : 'JOPLIN, MISSOURI', 'KJMR' : 'MORA, MINNESOTA', 'KJMS' : 'JAMESTOWN, NORTH DAKOTA', 'KJNX' : 'SMITHFIELD, NORTH CAROLINA', 'KJOT' : 'JOLIET, ILLINOIS', 'KJQF' : 'CONCORD, NORTH CAROLINA', 'KJSO' : 'JACKSONVILLE, TEXAS', 'KJST' : 'JOHNSTOWN, PENNSYLVANIA', 'KJSV' : 'SALLISAW, OKLAHOMA', 'KJVL' : 'JANESVILLE, WISCONSIN', 'KJWG' : 'WATONGA, OKLAHOMA', 'KJWY' : 'MIDLOTHIAN, TEXAS', 'KJXI' : 'GILMER, TEXAS', 'KJXN' : 'JACKSON, MICHIGAN', 'KJYG' : 'ST JAMES, MINNESOTA', 'KJYL' : 'SYLVANIA, GEORGIA', 'KJYM' : 'HILLSDALE, MICHIGAN', 'KJYO' : 'LEESBURG, VIRGINIA', 'KJYR' : 'YORK, NEBRASKA', 'KKLS' : 'KELSO, WASHINGTON', 'KL49' : 'ROSEMONT, CALIFORNIA', 'KLAA' : 'LAMAR, COLORADO', 'KLAF' : 'LAFAYETTE, INDIANA', 'KLAL' : 'LAKELAND, FLORIDA', 'KLAM' : 'LOS ALAMOS, NEW MEXICO', 'KLAN' : 'LANSING, MICHIGAN', 'KLAR' : 'LARAMIE, WYOMING', 'KLAS' : 'LAS VEGAS, NEVADA', 'KLAW' : 'LAWTON, OKLAHOMA', 'KLAX' : 'LOS ANGELES, CALIFORNIA', 'KLBB' : 'LUBBOCK, TEXAS', 'KLBE' : 'LATROBE, PENNSYLVANIA', 'KLBF' : 'NORTH PLATTE, NEBRASKA', 'KLBL' : 'LIBERAL, KANSAS', 'KLBR' : 'CLARKSVILLE, TEXAS', 'KLBT' : 'LUMBERTON, NORTH CAROLINA', 'KLBX' : 'ANGLETON, TEXAS', 'KLCG' : 'WAYNE, NEBRASKA', 'KLCH' : 'LAKE CHARLES, LOUISIANA', 'KLCI' : 'LACONIA, NEW HAMPSHIRE', 'KLCK' : 'COLUMBUS, OHIO', 'KLDM' : 'LUDINGTON, MICHIGAN', 'KLEB' : 'LEBANON, NEW HAMPSHIRE', 'KLEE' : 'LEESBURG, FLORIDA', 'KLEW' : 'AUBURN, MAINE', 'KLEX' : 'LEXINGTON, KENTUCKY', 'KLFI' : 'HAMPTON, VIRGINIA', 'KLFK' : 'LUFKIN, TEXAS', 'KLFT' : 'LAFAYETTE, LOUISIANA', 'KLGA' : 'NEW YORK, NEW YORK', 'KLGB' : 'LONG BEACH, CALIFORNIA', 'KLGC' : 'LAGRANGE, GEORGIA', 'KLGD' : 'LA GRANDE, OREGON', 'KLGU' : 'LOGAN, UTAH', 'KLHB' : 'HEARNE, TEXAS', 'KLHQ' : 'LANCASTER, OHIO', 'KLHW' : 'FORT STEWART(HINESVILLE), GEORGIA', 'KLHX' : 'LA JUNTA, COLORADO', 'KLHZ' : 'LOUISBURG, NORTH CAROLINA', 'KLIC' : 'LIMON, COLORADO', 'KLIT' : 'LITTLE ROCK, ARKANSAS', 'KLIX' : 'SLIDELL, LOUISANA', 'KLJF' : 'LITCHFIELD, MINNESOTA', 'KLKU' : 'LOUISA, VIRGINIA', 'KLKV' : 'LAKEVIEW, OREGON', 'KLLJ' : 'CHALLIS, IDAHOAHO', 'KLLQ' : 'MONTICELLO, ARKANSAS', 'KLMT' : 'KLAMATH FALLS, OREGON', 'KLNC' : 'LANCASTER, TEXAS', 'KLND' : 'LANDER, WYOMING', 'KLNK' : 'LINCOLN, NEBRASKA', 'KLNL' : 'LAND O\' LAKES, WISCONSIN', 'KLNN' : 'WILLOUGHBY, OHIO', 'KLNP' : 'WISE, VIRGINIA', 'KLNR' : 'LONE ROCK, WISCONSIN', 'KLNS' : 'LANCASTER, PENNSYLVANIA', 'KLOL' : 'LOVELOCK, NEVADA', 'KLOM' : 'PHILADELPHIA, PENNSYLVANIA', 'KLOR' : 'FORT RUCKER, ALABAMA', 'KLOT' : 'CHICAGO, 
ILLINOIS', 'KLOU' : 'LOUISVILLE, KENTUCKY', 'KLOZ' : 'LONDON, KENTUCKY', 'KLPC' : 'LOMPOC, CALIFORNIA', 'KLPR' : 'LORAIN, OHIO', 'KLRD' : 'LAREDO, TEXAS', 'KLRF' : 'LITTLE ROCK, ARKANSAS', 'KLRJ' : 'LE MARS, IOWA', 'KLRU' : 'LAS CRUCES, NEW MEXICO', 'KLSE' : 'LA CROSSE, WISCONSIN', 'KLSF' : 'COLUMBUS, GEORGIA', 'KLSV' : 'LAS VEGAS, NEVADA', 'KLSX' : 'ST CHARLES, MISSOURI', 'KLTS' : 'ALTUS, OKLAHOMA', 'KLUD' : 'DECATUR, TEXAS', 'KLUF' : 'PHOENIX, ARIZONA', 'KLUK' : 'CINCINNATI, OHIO', 'KLUM' : 'MENOMONIE, WISCONSIN', 'KLVJ' : 'HOUSTON, TEXAS', 'KLVK' : 'LIVERMORE, CALIFORNIA', 'KLVM' : 'LIVINGSTON, MONTANA', 'KLVN' : 'MINNEAPOLIS, MINNESOTA', 'KLVS' : 'LAS VEGAS, NEW MEXICO', 'KLWA' : 'SOUTH HAVEN, MICHIGAN', 'KLWB' : 'LEWISBURG, WEST VIRGINIA', 'KLWC' : 'LAWRENCE, KANSAS', 'KLWD' : 'LAMONI, IOWA', 'KLWM' : 'LAWRENCE, MASSACHUSETTS', 'KLWS' : 'LEWISTON, IDAHOAHO', 'KLWT' : 'LEWISTOWN, MONTANA', 'KLWV' : 'LAWRENCEVILLE, ILLINOIS', 'KLWX' : 'STERLING, MARYLAND', 'KLXL' : 'LITTLE FALLS, MINNESOTA', 'KLXN' : 'LEXINGTON, NEBRASKA', 'KLXT' : 'LEE\'S SUMMIT, MISSOURI', 'KLXV' : 'LEADVILLE, COLORADO', 'KLYH' : 'LYNCHBURG, VIRGINIA', 'KLYV' : 'LUVERNE, MINNESOTA', 'KLZK' : 'NORTH LITTLE ROCK, ARKANSAS', 'KLZU' : 'LAWRENCEVILLE, GEORGIA', 'KLZZ' : 'LAMPASAS, TEXAS', 'KM19' : 'NEWPORT, ARKANSAS', 'KM30' : 'METROPOLIS, ILLINOIS', 'KM82' : 'HUNTSVILLE, ALABAMA', 'KM97' : 'TUNICA, MISSISSIPPI', 'KMAE' : 'MADERA, CALIFORNIA', 'KMAF' : 'MIDLAND, TEXAS', 'KMAI' : 'MARIANNA, FLORIDA', 'KMAN' : 'NAMPA, IDAHOAHO', 'KMBG' : 'MOBRIDGE, SOUTH DAKOTA', 'KMBL' : 'MANISTEE, MICHIGAN', 'KMBS' : 'SAGINAW, MICHIGAN', 'KMCB' : 'MC COMB, MISSISSIPPI', 'KMCC' : 'SACRAMENTO, CALIFORNIA', 'KMCD' : 'MACKINAC ISLAND, MICHIGAN', 'KMCE' : 'MERCED, CALIFORNIA', 'KMCF' : 'TAMPA, FLORIDA', 'KMCI' : 'KANSAS CITY, MISSOURI', 'KMCK' : 'MC COOK, NEBRASKA', 'KMCN' : 'MACON, GEORGIA', 'KMCO' : 'ORLANDO, FLORIDA', 'KMCW' : 'MASON CITY, IOWA', 'KMDD' : 'MIDLAND, TEXAS', 'KMDH' : 'CARBONDALE, ILLINOIS', 'KMDQ' : 'HUNTSVILLE, ALABAMA', 'KMDT' : 'HARRISBURG, PENNSYLVANIA', 'KMDW' : 'CHICAGO, ILLINOIS', 'KMDZ' : 'MEDFORD, WISCONSIN', 'KMEB' : 'MAXTON, NORTH CAROLINA', 'KMEG' : 'MEMPHIS, TENNESSEE', 'KMEH' : 'MEACHAM, OREGON', 'KMEI' : 'MERIDIAN, MISSISSIPPI', 'KMEM' : 'MEMPHIS, TENNESSEE', 'KMER' : 'ATWATER, CALIFORNIA', 'KMEZ' : 'MENA, ARKANSAS', 'KMFD' : 'MANSFIELD, OHIO', 'KMFE' : 'MC ALLEN, TEXAS', 'KMFI' : 'MARSHFIELD, WISCONSIN', 'KMFR' : 'MEDFORD, OREGON', 'KMFV' : 'MELFA, VIRGINIA', 'KMGE' : 'MARIETTA, GEORGIA', 'KMGG' : 'MAPLE LAKE, MINNESOTA', 'KMGJ' : 'MONTGOMERY, NEW YORK', 'KMGM' : 'MONTGOMERY, ALABAMA', 'KMGN' : 'HARBOR SPRINGS, MICHIGAN', 'KMGR' : 'MOULTRIE, GEORGIA', 'KMGW' : 'MORGANTOWN, WEST VIRGINIA', 'KMGY' : 'DAYTON, OHIO', 'KMHE' : 'MITCHELL, SOUTH DAKOTA', 'KMHK' : 'MANHATTAN, KANSAS', 'KMHN' : 'MULLEN, NEBRASKA', 'KMHR' : 'SACRAMENTO, CALIFORNIA', 'KMHS' : 'MOUNT SHASTA, CALIFORNIA', 'KMHT' : 'MANCHESTER, NEW HAMPSHIRE', 'KMHV' : 'MOJAVE, CALIFORNIA', 'KMHX' : 'MOREHEAD CITY, NORTH CAROLINA', 'KMIA' : 'MIAMI, FLORIDA', 'KMIB' : 'MINOT, NORTH DAKOTA', 'KMIC' : 'MINNEAPOLIS, MINNESOTA', 'KMIE' : 'MUNCIE, INDIANA', 'KMIV' : 'MILLVILLE, NEW JERSEY', 'KMIW' : 'MARSHALLTOWN, IOWA', 'KMJQ' : 'JACKSON, MINNESOTA', 'KMKC' : 'KANSAS CITY, MISSOURI', 'KMKE' : 'MILWAUKEE, WISCONSIN', 'KMKG' : 'MUSKEGON, MICHIGAN', 'KMKJ' : 'MARION, VIRGINIA', 'KMKL' : 'JACKSON, TENNESSEE', 'KMKN' : 'COMANCHE, TEXAS', 'KMKO' : 'MUSKOGEE, OKLAHOMA', 'KMKT' : 'MANKATO, MINNESOTA', 'KMKX' : 'MILWAUKEE, WISCONSIN', 'KMLB' : 'MELBOURNE, FLORIDA', 'KMLC' 
: 'MC ALESTER, OKLAHOMA', 'KMLD' : 'MALAD CITY, IDAHOAHO', 'KMLE' : 'OMAHA, NEBRASKA', 'KMLF' : 'MILFORD, UTAH', 'KMLI' : 'MOLINE, ILLINOIS', 'KMLJ' : 'MILLEDGEVILLE, GEORGIA', 'KMLP' : 'MULLAN PASS, IDAHO', 'KMLS' : 'MILES CITY, MONTANA', 'KMLT' : 'MILLINOCKET, MAINE', 'KMLU' : 'MONROE, LOUISIANA', 'KMMH' : 'MAMMOTH LAKES, CALIFORNIA', 'KMMK' : 'MERIDEN, CONNECTICUT', 'KMML' : 'MARSHALL, MINNESOTA', 'KMMT' : 'EASTOVER, SOUTH CAROLINA,', 'KMMU' : 'MORRISTOWN, NEW JERSEY', 'KMMV' : 'MC MINNVILLE, OREGON', 'KMNH' : 'MONUMENT, COLORADO', 'KMNI' : 'MANNING, SOUTH CAROLINA', 'KMNM' : 'MENOMINEE, MICHIGAN', 'KMNN' : 'MARION, OHIO', 'KMOB' : 'MOBILE, ALABAMA', 'KMOD' : 'MODESTO, CALIFORNIA', 'KMOP' : 'MOUNT PLEASANT, MICHIGAN', 'KMOT' : 'MINOT, NORTH DAKOTA', 'KMOX' : 'MORRIS, MINNESOTA', 'KMPO' : 'MOUNT POCONO, PENNSYLVANIA', 'KMPV' : 'BARRE, VERMONT', 'KMPZ' : 'MOUNT PLEASANT, IOWA', 'KMQB' : 'MACOMB, ILLINOIS', 'KMQE' : 'MILTON, MASSACHUSETTS', 'KMQI' : 'MANTEO, NORTH CAROLINA', 'KMQT' : 'MARQUETTE, MICHIGAN', 'KMQY' : 'SMYRNA, TENNESSEE', 'KMRB' : 'MARTINSBURG, WEST VIRGINIA', 'KMRC' : 'COLUMBIA, TENNESSEE', 'KMRF' : 'MARFA, TEXAS', 'KMRH' : 'BEAUFORT, NORTH CAROLINA', 'KMRJ' : 'MINERAL POINT, WISCONSIN', 'KMRN' : 'MORGANTON, NORTH CAROLINA', 'KMRY' : 'MONTEREY, CALIFORNIA', 'KMSL' : 'MUSCLE SHOALS, ALABAMA', 'KMSN' : 'MADISON, WISCONSIN', 'KMSO' : 'MISSOULA, MONTANA', 'KMSP' : 'MINNEAPOLIS, MINNESOTA', 'KMSS' : 'MASSENA, NEW YORK', 'KMSV' : 'MONTICELLO, NEW YORK', 'KMSY' : 'NEW ORLEANS, LOUISIANA', 'KMTC' : 'MOUNT CLEMENS, MICHIGAN', 'KMTH' : 'MARATHON, FLORIDA', 'KMTJ' : 'MONTROSE, COLORADO', 'KMTN' : 'BALTIMORE, MARYLAND', 'KMTO' : 'MATTOON, ILLINOIS', 'KMTP' : 'MONTAUK, NEW YORK', 'KMTV' : 'MARTINSVILLE, VIRGINIA', 'KMTW' : 'MANITOWOC, WISCONSIN', 'KMTX' : 'MISSION, TEXAS', 'KMUI' : 'INDIANTOWN, PENNSYLVANIA', 'KMUO' : 'MOUNTAIN HOME, IDAHO', 'KMUT' : 'MUSCATINE, IOWA', 'KMVE' : 'MONTEVIDEO, MINNESOTA', 'KMVL' : 'MORRISVILLE, VERMONT', 'KMVN' : 'MOUNT VERNON, ILLINOIS', 'KMVY' : 'VINEYARD HAVEN, MASSACHUSETTS', 'KMWA' : 'MARION, ILLINOIS', 'KMWC' : 'MILWAUKEE, WISCONSIN', 'KMWH' : 'MOSES LAKE, WASHINGTON', 'KMWK' : 'MOUNT AIRY, NORTH CAROLINA', 'KMWL' : 'MINERAL WELLS, TEXAS', 'KMWM' : 'WINDOM, MINNESOTA', 'KMWN' : 'MT WASHINGTON, NEW HAMPSHIRE', 'KMWS' : 'MOUNT WILSON, CALIFORNIA', 'KMWT' : 'MOUNT IDA, ARKANSAS', 'KMXF' : 'MONTGOMERY, ALABAMA', 'KMXO' : 'MONTICELLO, IOWA', 'KMYF' : 'SAN DIEGO, CALIFORNIA', 'KMYL' : 'MC CALL, IDAHOAHO', 'KMYP' : 'MONARCH PASS, COLORADO', 'KMYR' : 'MYRTLE BEACH, SOUTH CAROLINA', 'KMYV' : 'MARYSVILLE, CALIFORNIA', 'KMZH' : 'MOOSE LAKE, MINNESOTA', 'KN00' : 'FULTON, NEW YORK', 'KN60' : 'GARRISON, NORTH DAKOTA', 'KNAK' : 'ANNAPOLIS, MARYLAND', 'KNBC' : 'BEAUFORT, SOUTH CAROLINA,', 'KNBE' : 'DALLAS, TEXAS', 'KNBG' : 'NEW ORLEANS, LOUISANA', 'KNBJ' : 'BARIN, ALABAMA', 'KNBT' : 'PINEY ISLAND, NORTH CAROLINA', 'KNCA' : 'JACKSONVILLE, NORTH CAROLINA', 'KNDZ' : 'MILTON, FLORIDA', 'KNEL' : 'LAKEHURST, NEW JERSEY', 'KNEN' : 'JACKSON, FLORIDA', 'KNEW' : 'NEW ORLEANS, LOUISIANA', 'KNFE' : 'FENTRESS, VIRGINIA', 'KNFG' : 'OCEANSIDE, CALIFORNIA', 'KNFJ' : 'MILTON, FLORIDA', 'KNFL' : 'FALLON, NEVADA', 'KNFW' : 'FORT WORTH, TEXAS', 'KNGP' : 'CORPUS CHRISTI, TEXAS', 'KNGU' : 'NORFOLK, VIRGINIA', 'KNGW' : 'CORPUS CHRISTI, TEXAS', 'KNHK' : 'PATUXENT RIVER, MARYLAND', 'KNHZ' : 'BRUNSWICK, MAINE', 'KNID' : 'INYOKERN, CALIFORNIA', 'KNIP' : 'JACKSONVILLE, FLORIDA', 'KNJK' : 'EL CENTRO, CALIFORNIA', 'KNJM' : 'SWANSBORO, NORTH CAROLINA', 'KNJW' : 'MERIDIAN, MISSISSIPPI', 'KNKT' : 
'CHERRY POINT, NORTH CAROLINA', 'KNKX' : 'SAN DIEGO, CALIFORNIA', 'KNLC' : 'LEMOORE, CALIFORNIA', 'KNMM' : 'MERIDIAN, MISSISSIPPI', 'KNOG' : 'ORANGE GROVE, TEXAS', 'KNOW' : 'PORT ANGELES, WASHINGTON', 'KNPA' : 'PENSACOLA, FLORIDA', 'KNQA' : 'MILLINGTON, TENNESSEE', 'KNQI' : 'KINGSVILLE, TEXAS', 'KNQX' : 'KEY WEST, FLORIDA', 'KNRA' : 'COUPEVILLE, WASHINGTON', 'KNRB' : 'MAYPORT, FLORIDA', 'KNRC' : 'CROWS LANDING, CALIFORNIA', 'KNRS' : 'IMPERIAL BEACH, CALIFORNIA', 'KNSE' : 'MILTON, FLORIDA', 'KNSI' : 'SAN NICOLAS ISLAND, CALIFORNIA', 'KNTD' : 'POINT MUGU, CALIFORNIA', 'KNTU' : 'VIRGINIA BEACH, VIRGINIA', 'KNUC' : 'SAN CLEMENTE, CALIFORNIA', 'KNUI' : 'ST INIGOES, MARYLAND', 'KNUQ' : 'MOUNTAIN VIEW, CALIFORNIA', 'KNUW' : 'WHIDBEY ISLAND, WASHINGTON', 'KNXP' : 'TWENTYNINE PALMS, CALIFORNIA', 'KNXX' : 'WILLOW GROVE, PENNSYLVANIA', 'KNYC' : 'NEW YORK CITY, NEW YORK', 'KNYG' : 'QUANTICO, VIRGINIA', 'KNYL' : 'YUMA, ARIZONA', 'KNZY' : 'SAN DIEGO, CALIFORNIA', 'KO18' : 'HANFORD, CALIFORNIA', 'KO54' : 'WEAVERVILLE, CALIFORNIA', 'KO64' : 'FORT BRAGG, CALIFORNIA', 'KOAJ' : 'JACKSONVILLE, NORTH CAROLINA', 'KOAK' : 'OAKLAND, CALIFORNIA', 'KOAX' : 'OMAHA, NEBRASKA', 'KOBE' : 'OKEECHOBEE, FLORIDA', 'KOCF' : 'OCALA, FLORIDA', 'KOCH' : 'NACOGDOCHES, TEXAS', 'KOCW' : 'WASHINGTON, NORTH CAROLINA', 'KODO' : 'ODESSA, TEXAS', 'KODX' : 'ORD, NEBRASKA', 'KOEB' : 'COLDWATER, MICHIGAN', 'KOEO' : 'OSCEOLA, WISCONSIN', 'KOFF' : 'OMAHA, NEBRASKA', 'KOFK' : 'NORFOLK, NEBRASKA', 'KOFP' : 'RICHMOND, VIRGINIA', 'KOGA' : 'OGALLALA, NEBRASKA', 'KOGB' : 'ORANGEBURG, SOUTH CAROLINA', 'KOGD' : 'OGDEN, UTAH', 'KOGS' : 'OGDENSBURG, NEW YORK', 'KOJA' : 'WEATHERFORD, OKLAHOMA', 'KOJC' : 'OLATHE, KANSAS', 'KOKB' : 'OCEANSIDE, CALIFORNIA', 'KOKC' : 'OKLAHOMA CITY, OKLAHOMA', 'KOKH' : 'OAK HARBOR, WASHINGTON', 'KOKK' : 'KOKOMO, INDIANA', 'KOKM' : 'OKMULGEE, OKLAHOMA', 'KOKV' : 'WINCHESTER, VIRGINIA', 'KOKX' : 'NEW YORK CITY, NEW YORK', 'KOLD' : 'OLD TOWN, MAINE', 'KOLE' : 'OLEAN, NEW YORK', 'KOLF' : 'WOLF POINT, MONTANA', 'KOLM' : 'OLYMPIA, WASHINGTON', 'KOLS' : 'NOGALES, ARIZONA', 'KOLU' : 'COLUMBUS, NEBRASKA', 'KOLV' : 'OLIVE BRANCH, MISSISSIPPI', 'KOLY' : 'OLNEY-NOBLE, ILLINOIS', 'KOLZ' : 'OELWEIN, IOWA', 'KOMA' : 'OMAHA, NEBRASKA', 'KOMH' : 'ORANGE, VIRGINIA', 'KOMK' : 'OMAK, WASHINGTON', 'KOMN' : 'ORMOND BEACH, FLORIDA', 'KONA' : 'WINONA, MINNESOTA', 'KONL' : 'O\'NEILL, NEBRASKA', 'KONM' : 'SOCORRO, NEW MEXICO', 'KONO' : 'ONTARIO, OREGON', 'KONP' : 'NEWPORT, OREGON', 'KONT' : 'ONTARIO, CALIFORNIA', 'KONX' : 'CURRITUCK, NORTH CAROLINA', 'KONZ' : 'DETROIT, MICHIGAN', 'KOOA' : 'OSKALOOSA, IOWA', 'KOPF' : 'MIAMI, FLORIDA', 'KOPN' : 'THOMASTON, GEORGIA', 'KOQT' : 'OAK RIDGE, TENNESSEE', 'KOQU' : 'NORTH KINGSTOWN, RHODE ISLAND', 'KORB' : 'ORR, MINNESOTA', 'KORC' : 'ORANGE CITY, IOWA', 'KORD' : 'CHICAGO, ILLINOIS', 'KORE' : 'ORANGE, MASSACHUSETTS', 'KORF' : 'NORFOLK, VIRGINIA', 'KORG' : 'ORANGE, TEXAS', 'KORH' : 'WORCESTER, MASSACHUSETTS', 'KORL' : 'ORLANDO, FLORIDA', 'KORS' : 'EASTSOUND, WASHINGTON', 'KOSA' : 'MOUNT PLEASANT, TEXAS', 'KOSC' : 'OSCODA, MICHIGAN', 'KOSH' : 'OSHKOSH, WISCONSIN', 'KOSU' : 'COLUMBUS, OHIO', 'KOTG' : 'WORTHINGTON, MINNESOTA', 'KOTH' : 'NORTH BEND, OREGON', 'KOTM' : 'OTTUMWA, IOWA', 'KOUN' : 'NORMAN, OKLAHOMA', 'KOVE' : 'OROVILLE, CALIFORNIA', 'KOVL' : 'OLIVIA, MINNESOTA', 'KOVS' : 'BOSCOBEL, WISCONSIN', 'KOWA' : 'OWATONNA, MINNESOTA', 'KOWB' : 'OWENSBORO, KENTUCKY', 'KOWD' : 'NORWOOD, MASSACHUSETTS', 'KOXB' : 'OCEAN CITY, MARYLAND', 'KOXC' : 'OXFORD, CONNECTICUT', 'KOXR' : 'OXNARD, CALIFORNIA', 'KOXV' : 
'KNOXVILLE, IOWA', 'KOZR' : 'FORT RUCKER, ALABAMA', 'KOZW' : 'HOWELL, MICHIGAN', 'KP01' : 'AJO, ARIZONA', 'KP28' : 'MEDICINE LODGE, KANSAS', 'KP53' : 'MUNSING LAKESHORE, MICHIGAN', 'KP58' : 'PORT HOPE, MICHIGAN', 'KP59' : 'COPPER HARBOR, MICHIGAN', 'KP60' : 'YELLOWSTONE, WYOMING', 'KP68' : 'EUREKA, NEVADA', 'KP69' : 'LOWELL, IDAHO', 'KP92' : 'SALT POINT, LOUISANA', 'KPAE' : 'EVERETT, WASHINGTON', 'KPAH' : 'PADUCAH, KENTUCKY', 'KPAM' : 'PANAMA CITY, FLORIDA', 'KPAO' : 'PALO ALTO, CALIFORNIA', 'KPBF' : 'PINE BLUFF, ARKANSAS', 'KPBG' : 'PLATTSBURGH, NEW YORK', 'KPBH' : 'PHILLIPS, WISCONSIN', 'KPBI' : 'WEST PALM BEACH, FLORIDA', 'KPCM' : 'PLANT CITY, FLORIDA', 'KPCZ' : 'WAUPACA, WISCONSIN', 'KPDC' : 'PRAIRIE DU CHIEN, WISCONSIN', 'KPDK' : 'ATLANTA, GEORGIA', 'KPDT' : 'PENDLETON, OREGON', 'KPDX' : 'PORTLAND, OREGON', 'KPEA' : 'PELLA, IOWA', 'KPEO' : 'PENN YAN, NEW YORK', 'KPEQ' : 'PECOS, TEXAS', 'KPEX' : 'PAYNESVILLE, MINNESOTA', 'KPFC' : 'PACIFIC CITY, OREGON', 'KPFN' : 'PANAMA CITY, FLORIDA', 'KPGA' : 'PAGE, ARIZONA', 'KPGD' : 'PUNTA GORDA, FLORIDA', 'KPGV' : 'GREENVILLE, NORTH CAROLINA', 'KPHD' : 'NEW PHILADELPHIA, OHIO', 'KPHF' : 'NEWPORT NEWS, VIRGINIA', 'KPHL' : 'PHILADELPHIA, PENNSYLVANIA', 'KPHN' : 'PORT HURON, MICHIGAN', 'KPHP' : 'PHILIP, SOUTH DAKOTA', 'KPHX' : 'PHOENIX, ARIZONA', 'KPIA' : 'PEORIA, ILLINOIS', 'KPIB' : 'HATTIESBURG, MISSISSIPPI', 'KPIE' : 'ST PETERSBURG-CLEARWATER, FLORIDA', 'KPIH' : 'POCATELLO, IDAHOAHO', 'KPIL' : 'PORT ISABEL, TEXAS', 'KPIR' : 'PIERRE, SOUTH DAKOTA', 'KPIT' : 'PITTSBURGH, PENNSYLVANIA', 'KPKB' : 'PARKERSBURG, WEST VIRGINIA', 'KPKD' : 'PARK RAPIDS, MINNESOTA', 'KPKF' : 'PARK FALLS, WISCONSIN', 'KPKV' : 'PORT LAVACA, TEXAS', 'KPLB' : 'PLATTSBURGH, NEW YORK', 'KPLN' : 'PELLSTON, MICHIGAN', 'KPMD' : 'PALMDALE, CALIFORNIA', 'KPMP' : 'POMPANO BEACH, FLORIDA', 'KPMV' : 'PLATTSMOUTH, NEBRASKA', 'KPNA' : 'PINEDALE, WYOMING', 'KPNC' : 'PONCA CITY, OKLAHOMA', 'KPNE' : 'PHILADELPHIA, PENNSYLVANIA', 'KPNM' : 'PRINCETON, MINNESOTA', 'KPNS' : 'PENSACOLA, FLORIDA', 'KPNT' : 'PONTIAC, ILLINOIS', 'KPOB' : 'FORT BRAGG, NORTH CAROLINA', 'KPOC' : 'LA VERNE, CALIFORNIA', 'KPOE' : 'FORT POLK, LOUISANA', 'KPOF' : 'POPLAR BLUFF, MISSOURI', 'KPOU' : 'POUGHKEEPSIE, NEW YORK', 'KPPA' : 'PAMPA, TEXAS', 'KPPF' : 'PARSONS, KANSAS', 'KPPQ' : 'PITTSFIELD, ILLINOIS', 'KPQI' : 'PRESQUE ISLE, MAINE', 'KPQL' : 'PASCAGOULA, MISSISSIPPI', 'KPQN' : 'PIPESTONE, MINNESOTA', 'KPRB' : 'PASO ROBLES, CALIFORNIA', 'KPRC' : 'PRESCOTT, ARIZONA', 'KPRG' : 'PARIS, ILLINOIS', 'KPRN' : 'GREENVILLE, ALABAMA', 'KPRX' : 'PARIS, TEXAS', 'KPSC' : 'PASCO, WASHINGTON', 'KPSF' : 'PITTSFIELD, MASSACHUSETTS', 'KPSK' : 'DUBLIN, VIRGINIA', 'KPSM' : 'PORTSMOUTH, NEW HAMPSHIRE', 'KPSN' : 'PALESTINE, TEXAS', 'KPSP' : 'PALM SPRINGS, CALIFORNIA', 'KPSX' : 'PALACIOS, TEXAS', 'KPTB' : 'PETERSBURG, VIRGINIA', 'KPTK' : 'PONTIAC, MICHIGAN', 'KPTN' : 'PATTERSON, LOUISIANA', 'KPTT' : 'PRATT, KANSAS', 'KPTV' : 'PORTERVILLE, CALIFORNIA', 'KPTW' : 'POTTSTOWN, PENNSYLVANIA', 'KPUB' : 'PUEBLO, COLORADO', 'KPUC' : 'PRICE, UTAH', 'KPUW' : 'PULLMAN, WASHINGTON', 'KPVC' : 'PROVINCETOWN, MASSACHUSETTS', 'KPVD' : 'PROVIDENCE, RHODE ISLAND', 'KPVJ' : 'PAULS VALLEY, OKLAHOMA', 'KPVU' : 'PROVO, UTAH', 'KPVW' : 'PLAINVIEW, TEXAS', 'KPWA' : 'OKLAHOMA CITY, OKLAHOMA', 'KPWC' : 'PINE RIVER, MINNESOTA', 'KPWG' : 'WACO, TEXAS', 'KPWK' : 'CHICAGO, ILLINOIS', 'KPWM' : 'PORTLAND, MAINE', 'KPWT' : 'BREMERTON, WASHINGTON', 'KPYM' : 'PLYMOUTH, MASSACHUSETTS', 'KPYX' : 'PERRYTON, TEXAS', 'KPZQ' : 'ROGERS CITY, MICHIGAN', 'KQCA' : 'GRANITE PEAK, UTAH', 
'KQCB' : 'GRANITE PEAK, UTAH', 'KRAC' : 'RACINE, WISCONSIN', 'KRAD' : 'WARROAD, MINNESOTA', 'KRAL' : 'RIVERSIDE, CALIFORNIA', 'KRAP' : 'RAPID CITY, SOUTH DAKOTA', 'KRAS' : 'PORT ARANSAS, TEXAS', 'KRBD' : 'DALLAS, TEXAS', 'KRBG' : 'ROSEBURG, OREGON', 'KRBL' : 'RED BLUFF, CALIFORNIA', 'KRBO' : 'ROBSTOWN, TEXAS', 'KRCA' : 'RAPID CITY, SOUTH DAKOTA', 'KRCX' : 'LADYSMITH, WISCONSIN', 'KRDD' : 'REDDING, CALIFORNIA', 'KRDG' : 'READING, PENNSYLVANIA', 'KRDK' : 'RED OAK, IOWA', 'KRDM' : 'REDMOND, OREGON', 'KRDR' : 'GRAND FORKS, NORTH DAKOTA', 'KRDU' : 'RALEIGH-DURHAM, NORTH CAROLINA', 'KRED' : 'RED LODGE, MONTANA', 'KREE' : 'JANESVILLE, WISCONSIN', 'KREO' : 'ROME, OREGON', 'KRFD' : 'CHICAGO, ILLINOIS', 'KRGK' : 'RED WING, MINNESOTA', 'KRHI' : 'RHINELANDER, WISCONSIN', 'KRHP' : 'ANDREWS, NORTH CAROLINA', 'KRHV' : 'SAN JOSE, CALIFORNIA', 'KRIC' : 'RICHMOND, VIRGINIA', 'KRIL' : 'RIFLE, COLORADO', 'KRIV' : 'RIVERSIDE, CALIFORNIA', 'KRIW' : 'RIVERTON, WYOMING', 'KRKD' : 'ROCKLAND, MAINE', 'KRKP' : 'ROCKPORT, TEXAS', 'KRKR' : 'POTEAU, OKLAHOMA', 'KRKS' : 'ROCK SPRINGS, WYOMING', 'KRLX' : 'CHARLESTON, WEST VIRGINIA', 'KRME' : 'ROME, NEW YORK', 'KRMG' : 'ROME, GEORGIA', 'KRMN' : 'STAFFORD, VIRGINIA', 'KRMY' : 'MARSHALL, MICHIGAN', 'KRND' : 'SAN ANTONIO, TEXAS', 'KRNH' : 'NEW RICHMOND, WISCONSIN', 'KRNM' : 'RAMONA, CALIFORNIA', 'KRNO' : 'RENO, NEVADA', 'KRNP' : 'OWOSSO, MICHIGAN', 'KRNT' : 'RENTON, WASHINGTON', 'KROA' : 'ROANOKE, VIRGINIA', 'KROC' : 'ROCHESTER, NEW YORK', 'KROG' : 'ROGERS, ARKANSAS', 'KROS' : 'RUSH CITY, MINNESOTA', 'KROW' : 'ROSWELL, NEW MEXICO', 'KROX' : 'ROSEAU, MINNESOTA', 'KRPD' : 'RICE LAKE, WISCONSIN', 'KRPH' : 'GRAHAM, TEXAS', 'KRPJ' : 'ROCHELLE, ILLINOIS', 'KRQB' : 'BIG RAPIDS, MICHIGAN', 'KRQE' : 'WINDOW ROCK, ARIZONA', 'KRQO' : 'EL RENO, OKLAHOMA', 'KRRL' : 'MERRILL, WISCONSIN', 'KRRT' : 'WARROAD, MINNESOTA', 'KRSL' : 'RUSSELL, KANSAS', 'KRSN' : 'RUSTON, LOUISIANA', 'KRST' : 'ROCHESTER, MINNESOTA', 'KRSV' : 'ROBINSON, ILLINOIS', 'KRSW' : 'FORT MYERS, FLORIDA', 'KRTN' : 'RATON, NEW MEXICO', 'KRUE' : 'RUSSELLVILLE, ARKANSAS', 'KRUG' : 'RUGBY, NORTH DAKOTA', 'KRUQ' : 'SALISBURY, NORTH CAROLINA', 'KRUT' : 'RUTLAND, VERMONT', 'KRVL' : 'REEDSVILLE, PENNSYLVANIA', 'KRVS' : 'TULSA, OKLAHOMA', 'KRWF' : 'REDWOOD FALLS, MINNESOTA', 'KRWI' : 'ROCKY MOUNT, NORTH CAROLINA', 'KRWL' : 'RAWLINS, WYOMING', 'KRWV' : 'CALDWELL, TEXAS', 'KRXE' : 'REXBURG, IDAHOAHO', 'KRYV' : 'WATERTOWN, WISCONSIN', 'KRYY' : 'ATLANTA, GEORGIA', 'KRZN' : 'SIREN, WISCONSIN', 'KRZZ' : 'ROANOKE RAPIDS, NORTH CAROLINA', 'KS21' : 'SUNRIVER, OREGON', 'KS25' : 'WATFORD CITY, NORTH DAKOTA', 'KS32' : 'COOPERSTOWN, NORTH DAKOTA', 'KS47' : 'HATHAWAY MEAD, OREGON', 'KS80' : 'GRANGEVILLE, IDAHO', 'KS88' : 'ARLINGTON, WASHINGTON', 'KSAC' : 'SACRAMENTO, CALIFORNIA', 'KSAD' : 'SAFFORD, ARIZONA', 'KSAF' : 'SANTA FE, NEW MEXICO', 'KSAN' : 'SAN DIEGO, CALIFORNIA', 'KSAR' : 'SPARTA, ILLINOIS', 'KSAT' : 'SAN ANTONIO, TEXAS', 'KSAV' : 'SAVANNAH, GEORGIA', 'KSAW' : 'MARQUETTE, MICHIGAN', 'KSAZ' : 'STAPLES, MINNESOTA', 'KSBA' : 'SANTA BARBARA, CALIFORNIA', 'KSBD' : 'SAN BERNARDINO, CALIFORNIA', 'KSBM' : 'SHEBOYGAN, WISCONSIN', 'KSBN' : 'SOUTH BEND, INDIANA', 'KSBP' : 'SAN LUIS OBISPO, CALIFORNIA', 'KSBS' : 'STEAMBOAT SPRINGS, COLORADO', 'KSBY' : 'SALISBURY, MARYLAND', 'KSCH' : 'SCHENECTADY, NEW YORK', 'KSCK' : 'STOCKTON, CALIFORNIA', 'KSDA' : 'SHENANDOAH, IOWA', 'KSDB' : 'SANDBERG, CALIFORNIA', 'KSDF' : 'LOUISVILLE, KENTUCKY', 'KSDL' : 'SCOTTSDALE, ARIZONA', 'KSDM' : 'SAN DIEGO, CALIFORNIA', 'KSDY' : 'SIDNEY, MONTANA', 'KSEA' : 'SEATTLE, 
WASHINGTON', 'KSEE' : 'SAN DIEGO, CALIFORNIA', 'KSEG' : 'SELINSGROVE, PENNSYLVANIA', 'KSEM' : 'SELMA, ALABAMA', 'KSEP' : 'STEPHENVILLE, TEXAS', 'KSET' : 'ST CHARLES, MISSOURI', 'KSEW' : 'SEATTLE, WASHINGTON', 'KSEZ' : 'SEDONA, ARIZONA', 'KSFB' : 'ORLANDO, FLORIDA', 'KSFF' : 'SPOKANE, WASHINGTON', 'KSFM' : 'SANFORD, MAINE', 'KSFO' : 'SAN FRANCISCO, CALIFORNIA', 'KSFQ' : 'SUFFOLK, VIRGINIA', 'KSFY' : 'SAVANNA, ILLINOIS', 'KSFZ' : 'PAWTUCKET, RHODE ISLAND', 'KSGF' : 'SPRINGFIELD, MISSOURI', 'KSGH' : 'SPRINGFIELD, OHIO', 'KSGJ' : 'ST AUGUSTINE, FLORIDA', 'KSGR' : 'HOUSTON, TEXAS', 'KSGS' : 'SOUTH ST PAUL, MINNESOTA', 'KSGT' : 'STUTTGART, ARKANSAS', 'KSGU' : 'ST GEORGE, UTAH', 'KSHD' : 'STAUNTON, VIRGINIA', 'KSHL' : 'SHELDON, IOWA', 'KSHN' : 'SHELTON, WASHINGTON', 'KSHR' : 'SHERIDAN, WYOMING', 'KSHV' : 'SHREVEPORT, LOUISIANA', 'KSIY' : 'MONTAGUE, CALIFORNIA', 'KSJC' : 'SAN JOSE, CALIFORNIA', 'KSJN' : 'ST JOHNS, ARIZONA', 'KSJT' : 'SAN ANGELO, TEXAS', 'KSJX' : 'BEAVER ISLAND, MICHIGAN', 'KSKA' : 'SPOKANE, WASHINGTON', 'KSKF' : 'SAN ANTONIO, TEXAS', 'KSKX' : 'TAOS, NEW MEXICO', 'KSLB' : 'STORM LAKE, IOWA', 'KSLC' : 'SALT LAKE CITY, UTAH', 'KSLE' : 'SALEM, OREGON', 'KSLG' : 'SILOAM SPRINGS, ARKANSAS', 'KSLH' : 'CHEBOYGAN, MICHIGAN', 'KSLI' : 'LOS ALAMITOS, CALIFORNIA', 'KSLK' : 'SARANAC LAKE, NEW YORK', 'KSLN' : 'SALINA, KANSAS', 'KSLO' : 'SALEM, ILLINOIS', 'KSLR' : 'SULPHUR SPRINGS, TEXAS', 'KSME' : 'SOMERSET, KENTUCKY', 'KSMF' : 'SACRAMENTO, CALIFORNIA', 'KSMN' : 'SALMON, IDAHOAHO', 'KSMO' : 'SANTA MONICA, CALIFORNIA', 'KSMQ' : 'SOMERVILLE, NEW JERSEY', 'KSMX' : 'SANTA MARIA, CALIFORNIA', 'KSNA' : 'SANTA ANA, CALIFORNIA', 'KSNC' : 'CHESTER, CONNECTICUT', 'KSNK' : 'SNYDER, TEXAS', 'KSNL' : 'SHAWNEE, OKLAHOMA', 'KSNS' : 'SALINAS, CALIFORNIA', 'KSNT' : 'STANLEY, IDAHO', 'KSNY' : 'SIDNEY, NEBRASKA', 'KSOA' : 'SONORA, TEXAS', 'KSOP' : 'PINEHURST, NORTH CAROLINA', 'KSOW' : 'SHOW LOW, ARIZONA', 'KSPA' : 'SPARTANBURG, SOUTH CAROLINA', 'KSPB' : 'SCAPPOOSE, OREGON', 'KSPD' : 'SPRINGFIELD, COLORADO', 'KSPF' : 'SPEARFISH, SOUTH DAKOTA', 'KSPG' : 'ST PETERSBURG, FLORIDA', 'KSPI' : 'SPRINGFIELD, ILLINOIS', 'KSPL' : 'SOUTH PADRE ISLAND, TEXAS', 'KSPS' : 'WICHITA FALLS, TEXAS', 'KSPW' : 'SPENCER, IOWA', 'KSQI' : 'STERLING, ILLINOIS', 'KSQL' : 'SAN CARLOS, CALIFORNIA', 'KSRC' : 'SEARCY, ARKANSAS', 'KSRE' : 'SEMINOLE, OKLAHOMA', 'KSRQ' : 'SARASOTA, FLORIDA', 'KSRR' : 'RUIDOSO, NEW MEXICO', 'KSSC' : 'SUMTER, SOUTH CAROLINA,', 'KSSF' : 'SAN ANTONIO, TEXAS', 'KSSI' : 'BRUNSWICK, GEORGIA', 'KSTC' : 'ST CLOUD, MINNESOTA', 'KSTE' : 'STEVENS POINT, WISCONSIN', 'KSTJ' : 'ST JOSEPH, MISSOURI', 'KSTL' : 'ST LOUIS, MISSOURI', 'KSTP' : 'ST PAUL, MINNESOTA', 'KSTS' : 'SANTA ROSA, CALIFORNIA', 'KSTT' : 'CHARLOTTE AMALIE, VIRGIN ISLANDS', 'KSTX' : 'CHRISTIANSTED, VIRGIN ISLANDS', 'KSUA' : 'STUART, FLORIDA', 'KSUE' : 'STURGEON BAY, WISCONSIN', 'KSUN' : 'HAILEY, IDAHOAHO', 'KSUS' : 'ST LOUIS, MISSOURI', 'KSUT' : 'OAK ISLAND, NORTH CAROLINA', 'KSUU' : 'FAIRFIELD, CALIFORNIA', 'KSUW' : 'SUPERIOR, WISCONSIN', 'KSUX' : 'SIOUX CITY, IOWA', 'KSVC' : 'SILVER CITY, NEW MEXICO', 'KSVE' : 'SUSANVILLE, CALIFORNIA', 'KSVH' : 'STATESVILLE, NORTH CAROLINA', 'KSVN' : 'SAVANNAH, GEORGIA', 'KSWD' : 'SEWARD, ALASKA', 'KSWF' : 'NEWBURGH, NEW YORK', 'KSWO' : 'STILLWATER, OKLAHOMA', 'KSWW' : 'SWEETWATER, TEXAS', 'KSXT' : 'SEXTON SUMMIT, OREGON', 'KSYN' : 'STANTON, MINNESOTA', 'KSYR' : 'SYRACUSE, NEW YORK', 'KSZL' : 'KNOB NOSTER, MISSOURI', 'KSZN' : 'SANTA CRUZ, CALIFORNIA', 'KSZT' : 'SANDPOINT, IDAHOAHO', 'KT65' : 'WESLACO, TEXAS', 'KT82' : 
'FREDERICKSBURG, TEXAS', 'KTAD' : 'TRINIDAD, COLORADO', 'KTAN' : 'TAUNTON, MASSACHUSETTS', 'KTAZ' : 'TAYLORVILLE, ILLINOIS', 'KTBN' : 'FORT LEONARD WOOD, MISSOURI', 'KTBR' : 'STATESBORO, GEORGIA', 'KTBW' : 'TAMPA, FLORIDA', 'KTCC' : 'TUCUMCARI, NEW MEXICO', 'KTCL' : 'TUSCALOOSA, ALABAMA', 'KTCM' : 'SEATTLE, WASHINGTON', 'KTCS' : 'TRUTH OR CONSEQUENCES, NEW MEXICO', 'KTDF' : 'ROXBORO, NORTH CAROLINA', 'KTDO' : 'TOLEDO, WASHINGTON', 'KTDZ' : 'TOLEDO, OHIO', 'KTEB' : 'TETERBORO, NEW JERSEY', 'KTEW' : 'MASON, MICHIGAN', 'KTEX' : 'TELLURIDE, COLORADO', 'KTFX' : 'GREAT FALLS, MONTANA', 'KTHV' : 'YORK, PENNSYLVANIA', 'KTIF' : 'THEDFORD, NEBRASKA', 'KTIK' : 'OKLAHOMA CITY, OKLAHOMA', 'KTIP' : 'RANTOUL, ILLINOIS', 'KTIW' : 'TACOMA, WASHINGTON', 'KTIX' : 'TITUSVILLE, FLORIDA', 'KTKC' : 'TRACY, MINNESOTA', 'KTKI' : 'DALLAS, TEXAS', 'KTKV' : 'TOMAHAWK, WISCONSIN', 'KTLH' : 'TALLAHASSEE, FLORIDA', 'KTMB' : 'MIAMI, FLORIDA', 'KTNB' : 'BOONE, NORTH CAROLINA', 'KTNU' : 'NEWTON, IOWA', 'KTNX' : 'MELLAN, NEVADA', 'KTOA' : 'TORRANCE, CALIFORNIA', 'KTOB' : 'DODGE CENTER, MINNESOTA', 'KTOI' : 'TROY, ALABAMA', 'KTOL' : 'TOLEDO, OHIO', 'KTOP' : 'TOPEKA, KANSAS', 'KTOR' : 'TORRINGTON, WYOMING', 'KTPA' : 'TAMPA, FLORIDA', 'KTPF' : 'TAMPA, FLORIDA', 'KTPH' : 'TONOPAH, NEVADA', 'KTPL' : 'TEMPLE, TEXAS', 'KTQE' : 'TEKAMAH, NEBRASKA', 'KTQH' : 'TAHLEQUAH, OKLAHOMA', 'KTRI' : 'BRISTOL, TENNESSEE', 'KTRK' : 'TRUCKEE, CALIFORNIA', 'KTRL' : 'TERRELL, TEXAS', 'KTRM' : 'PALM SPRINGS, CALIFORNIA', 'KTTA' : 'SANFORD, NORTH CAROLINA', 'KTTD' : 'PORTLAND, OREGON', 'KTTF' : 'MONROE, MICHIGAN', 'KTTN' : 'TRENTON, NEW JERSEY', 'KTTS' : 'CAPE KENNEDY, FLORIDA', 'KTUL' : 'TULSA, OKLAHOMA', 'KTUP' : 'TUPELO, MISSISSIPPI', 'KTUS' : 'TUCSON, ARIZONA', 'KTVC' : 'TRAVERSE CITY, MICHIGAN', 'KTVF' : 'THIEF RIVER FALLS, MINNESOTA', 'KTVI' : 'THOMASVILLE, GEORGIA', 'KTVL' : 'SOUTH LAKE TAHOE, CALIFORNIA', 'KTVR' : 'TALLULAH, LOUISIANA', 'KTWF' : 'TWIN FALLS, IDAHOAHO', 'KTWM' : 'TWO HARBORS, MINNESOTA', 'KTXK' : 'TEXARKANA, ARKANSAS', 'KTYR' : 'TYLER, TEXAS', 'KTYS' : 'KNOXVILLE, TENNESSEE', 'KTZR' : 'COLUMBUS, OHIO', 'KU16' : 'HILL RANGE, UTAH', 'KU24' : 'DELTA, UTAH', 'KU42' : 'SALT LAKE CITY, UTAH', 'KU67' : 'ROOSEVELT, UTAH', 'KU78' : 'SODA SPRINGS, IDAHO', 'KUAO' : 'AURORA, OREGON', 'KUCA' : 'UTICA, NEW YORK', 'KUCP' : 'NEW CASTLE, PENNSYLVANIA', 'KUDG' : 'DARLINGTON, SOUTH CAROLINA', 'KUES' : 'WAUKESHA, WISCONSIN', 'KUGN' : 'CHICAGO, ILLINOIS', 'KUIL' : 'QUILLAYUTE, WASHINGTON', 'KUIN' : 'QUINCY, ILLINOIS', 'KUKF' : 'NORTH WILKESBORO, NORTH CAROLINA', 'KUKI' : 'UKIAH, CALIFORNIA', 'KUKL' : 'BURLINGTON, KANSAS', 'KUKT' : 'QUAKERTOWN, PENNSYLVANIA', 'KULM' : 'NEW ULM, MINNESOTA', 'KUNO' : 'WEST PLAINS, MISSOURI', 'KUNU' : 'JUNEAU, WISCONSIN', 'KUNV' : 'STATE COLLEGE, PENNSYLVANIA', 'KUOX' : 'OXFORD, MISSISSIPPI', 'KUTA' : 'TUNICA, MISSISSIPPI', 'KUTS' : 'HUNTSVILLE, TEXAS', 'KUUU' : 'NEWPORT, RHODE ISLAND', 'KUVA' : 'UVALDE, TEXAS', 'KUZA' : 'ROCK HILL, SOUTH CAROLINA', 'KVAD' : 'VALDOSTA, GEORGIA', 'KVAY' : 'MOUNT HOLLY, NEW JERSEY', 'KVBG' : 'VANDENBERG, CALIFORNIA', 'KVBT' : 'BENTONVILLE, ARKANSAS', 'KVCB' : 'VACAVILLE, CALIFORNIA', 'KVCT' : 'VICTORIA, TEXAS', 'KVCV' : 'VICTORVILLE, CALIFORNIA', 'KVDF' : 'TAMPA, FLORIDA', 'KVDI' : 'VIDALIA, GEORGIA', 'KVDW' : 'VEDAUWOO, WYOMING', 'KVEL' : 'VERNAL, UTAH', 'KVGT' : 'LAS VEGAS, NEVADA', 'KVIH' : 'ROLLA, MISSOURI', 'KVIS' : 'VISALIA, CALIFORNIA', 'KVJI' : 'ABINGDON, VIRGINIA', 'KVKS' : 'VICKSBURG, MISSISSIPPI', 'KVLD' : 'VALDOSTA, GEORGIA', 'KVLL' : 'TROY, MICHIGAN', 'KVNY' : 'VAN NUYS, 
CALIFORNIA', 'KVOK' : 'VOLK, WISCONSIN', 'KVPC' : 'CARTERSVILLE, GEORGIA', 'KVPS' : 'VALPARAISO, FLORIDA', 'KVPZ' : 'VALPARAISO, INDIANA', 'KVQQ' : 'JACKSONVILLE, FLORIDA', 'KVRB' : 'VERO BEACH, FLORIDA', 'KVSF' : 'SPRINGFIELD, VERMONT', 'KVTA' : 'NEWARK, OHIO', 'KVTI' : 'VINTON, IOWA', 'KVTN' : 'VALENTINE, NEBRASKA', 'KVTP' : 'LA VETA PASS, COLORADO', 'KVUJ' : 'ALBEMARLE, NORTH CAROLINA', 'KVUO' : 'VANCOUVER, WASHINGTON', 'KVVG' : 'LADY LAKE, FLORIDA', 'KVVV' : 'ORTONVILLE, MINNESOTA', 'KVWU' : 'WASKISH, MINNESOTA', 'KVYS' : 'PERU, ILLINOIS', 'KW22' : 'BUCKHANNON, WEST VIRGINIA', 'KW39' : 'ROCHE HARBOR SPB, WASHINGTON', 'KW45' : 'LURAY, VIRGINIA', 'KW51' : 'CAPE CHARLES, VIRGINIA', 'KW63' : 'CLARKSVILLE, VIRGINIA', 'KW99' : 'TOWN HILL, WEST VIRGINIA', 'KWAL' : 'CHINCOTEAGUE, VIRGINIA', 'KWDG' : 'ENID, OKLAHOMA', 'KWDR' : 'WINDER, GEORGIA', 'KWHP' : 'LOS ANGELES, CALIFORNIA', 'KWJF' : 'LANCASTER, CALIFORNIA', 'KWLD' : 'WINFIELD, KANSAS', 'KWMC' : 'WINNEMUCCA, NEVADA', 'KWRB' : 'WARNER ROBINS, GEORGIA', 'KWRI' : 'WRIGHTSTOWN, NEW JERSEY', 'KWRL' : 'WORLAND, WYOMING', 'KWST' : 'WESTERLY, RHODE ISLAND', 'KWVI' : 'WATSONVILLE, CALIFORNIA', 'KWVL' : 'WATERVILLE, MAINE', 'KWWD' : 'WILDWOOD, NEW JERSEY', 'KWWR' : 'WOODWARD, OKLAHOMA', 'KWYS' : 'WEST YELLOWSTONE, MONTANA', 'KX21' : 'TITUSVILLE, FLORIDA', 'KXBP' : 'BRIDGEPORT, TEXAS', 'KXMR' : 'COCOA BEACH, FLORIDA', 'KXNA' : 'FAYETTEVILLE, ARKANSAS', 'KXVG' : 'LONGVILLE, MINNESOTA', 'KY19' : 'MANDAN, NORTH DAKOTA', 'KY50' : 'WAUTOMA, WISCONSIN', 'KY51' : 'VIROQUA, WISCONSIN', 'KY63' : 'ELBOW LAKE, MINNESOTA', 'KYIP' : 'DETROIT, MICHIGAN', 'KYKM' : 'YAKIMA, WASHINGTON', 'KYKN' : 'YANKTON, SOUTH DAKOTA', 'KYNG' : 'YOUNGSTOWN, OHIO', 'KYUM' : 'YUMA, ARIZONA', 'KZAB' : 'ALBUQUERQUE, NEW MEXICO', 'KZLC' : 'SALT LAKE CITY, UTAH', 'KZSE' : 'AUBURN, WASHINGTON', 'KZZV' : 'ZANESVILLE, OHIO', 'PAAK' : 'ATKA, ALASKA', 'PAAP' : 'PORT ALEXANDER, ALASKA', 'PAAQ' : 'PALMER, ALASKA', 'PABA' : 'BARTER ISLAND LRRS, ALASKA', 'PABE' : 'BETHEL, ALASKA', 'PABG' : 'BELUGA, ALASKA', 'PABI' : 'DELTA JUNCTION, ALASKA', 'PABL' : 'BUCKLAND, ALASKA', 'PABN' : 'NABESNA, ALASKA', 'PABR' : 'BARROW, ALASKA', 'PABT' : 'BETTLES, ALASKA', 'PABV' : 'BIRCHWOOD, ALASKA', 'PACD' : 'COLD BAY, ALASKA', 'PACE' : 'CENTRAL, ALASKA', 'PACL' : 'CLEAR, ALASKA', 'PACP' : 'CAPE SAINT ELIAS, ALASKA', 'PACR' : 'CIRCLE, ALASKA', 'PACS' : 'CAPE SARICHEF, ALASKA', 'PACV' : 'CORDOVA, ALASKA', 'PACY' : 'YAKATAGA, ALASKA', 'PACZ' : 'CAPE ROMANZOFF, ALASKA', 'PADE' : 'DEERING, ALASKA', 'PADK' : 'ADAK ISLAND, ALASKA', 'PADL' : 'DILLINGHAM, ALASKA', 'PADQ' : 'KODIAK, ALASKA', 'PADT' : 'SLANA, ALASKA', 'PADU' : 'UNALASKA, ALASKA', 'PAEC' : 'CHULITNA, ALASKA', 'PAED' : 'ANCHORAGE, ALASKA', 'PAEG' : 'EAGLE, ALASKA', 'PAEH' : 'CAPE NEWENHAM, ALASKA', 'PAEI' : 'FAIRBANKS, ALASKA', 'PAEL' : 'ELFIN COVE, ALASKA', 'PAEM' : 'EMMONAK, ALASKA', 'PAEN' : 'KENAI, ALASKA', 'PAER' : 'MERRILL PASS WEST, ALASKA', 'PAFA' : 'FAIRBANKS, ALASKA', 'PAFB' : 'FAIRBANKS, ALASKA', 'PAFE' : 'KAKE, ALASKA', 'PAFK' : 'FAREWELL LAKE, ALASKA', 'PAFM' : 'AMBLER, ALASKA', 'PAFR' : 'FORT RICHARDSON, ALASKA', 'PAFW' : 'FAREWELL, ALASKA', 'PAGA' : 'GALENA, ALASKA', 'PAGB' : 'GALBRAITH LAKE, ALASKA', 'PAGK' : 'GULKANA, ALASKA', 'PAGL' : 'GOLOVIN, ALASKA', 'PAGM' : 'GAMBELL, ALASKA', 'PAGN' : 'ANGOON, ALASKA', 'PAGS' : 'GUSTAVUS, ALASKA', 'PAGT' : 'NIGHTMUTE, ALASKA', 'PAGY' : 'SKAGWAY, ALASKA', 'PAHL' : 'HUSLIA, ALASKA', 'PAHN' : 'HAINES, ALASKA', 'PAHO' : 'HOMER, ALASKA', 'PAHP' : 'HOOPER BAY, ALASKA', 'PAHV' : 'HEALY, ALASKA', 'PAHY' : 
'HYDABURG, ALASKA', 'PAHZ' : 'HAYES RIVER, ALASKA', 'PAII' : 'EGEGIK, ALASKA', 'PAIK' : 'KIANA, ALASKA', 'PAIL' : 'ILIAMNA, ALASKA', 'PAIM' : 'UTOPIA CREEK, ALASKA', 'PAIN' : 'MCKINLEY PARK, ALASKA', 'PAIW' : 'WALES, ALASKA', 'PAIZ' : 'LAZY MTN, ALASKA', 'PAJB' : 'BIORKA ISLAND, ALASKA', 'PAJC' : 'CHIGNIK, ALASKA', 'PAJN' : 'JUNEAU, ALASKA', 'PAJO' : 'CAPE HINCHINBROOK, ALASKA', 'PAJV' : 'SUTTON, ALASKA', 'PAKI' : 'KIPNUK, ALASKA', 'PAKK' : 'KOYUK, ALASKA', 'PAKN' : 'KING SALMON, ALASKA', 'PAKO' : 'NIKOLSKI, ALASKA', 'PAKP' : 'ANAKTUVUK PASS, ALASKA', 'PAKT' : 'KETCHIKAN, ALASKA', 'PAKU' : 'KUPARUK, ALASKA', 'PAKV' : 'KALTAG, ALASKA', 'PAKW' : 'KLAWOCK, ALASKA', 'PALH' : 'ANCHORAGE, ALASKA', 'PALJ' : 'PORT ALSWORTH, ALASKA', 'PALK' : 'SNOWSHOE LAKE, ALASKA', 'PALP' : 'DEADHORSE, ALASKA', 'PALR' : 'CHANDALAR LAKE, ALASKA', 'PALU' : 'CAPE LISBURNE, ALASKA', 'PALV' : 'BIG RIVER LAKE, ALASKA', 'PAMC' : 'MCGRATH, ALASKA', 'PAMD' : 'MIDDLETON ISLAND, ALASKA', 'PAMH' : 'MINCHUMINA, ALASKA', 'PAML' : 'MANLEY HOT SPRINGS, ALASKA', 'PAMM' : 'METLAKATLA, ALASKA', 'PAMR' : 'ANCHORAGE, ALASKA', 'PAMX' : 'MCCARTHY, ALASKA', 'PAMY' : 'MEKORYUK, ALASKA', 'PANC' : 'ANCHORAGE, ALASKA', 'PANI' : 'ANIAK, ALASKA', 'PANN' : 'NENANA, ALASKA', 'PANR' : 'FUNTER BAY, ALASKA', 'PANT' : 'ANNETTE ISLAND, ALASKA', 'PANV' : 'ANVIK, ALASKA', 'PAOH' : 'HOONAH, ALASKA', 'PAOM' : 'NOME, ALASKA', 'PAOR' : 'NORTHWAY, ALASKA', 'PAOT' : 'KOTZEBUE, ALASKA', 'PAPB' : 'ST GEORGE, ALASKA', 'PAPC' : 'PORT CLARENCE, ALASKA', 'PAPG' : 'PETERSBURG, ALASKA', 'PAPH' : 'PORT HEIDEN, ALASKA', 'PAPM' : 'PLATINUM, ALASKA', 'PAPO' : 'POINT HOPE, ALASKA', 'PAPR' : 'PROSPECT CREEK, ALASKA', 'PAPT' : 'PUNTILLA, ALASKA', 'PAQT' : 'NUIQSUT, ALASKA', 'PARC' : 'ARCTIC VILLAGE, ALASKA', 'PARD' : 'RED DOG MINE, ALASKA', 'PARL' : 'CENTRAL, ALASKA', 'PASA' : 'SAVOONGA, ALASKA', 'PASC' : 'DEADHORSE, ALASKA', 'PASD' : 'SAND POINT, ALASKA', 'PASH' : 'SHISHMAREF, ALASKA', 'PASI' : 'SITKA, ALASKA', 'PASK' : 'SELAWIK, ALASKA', 'PASL' : 'SLEETMUTE, ALASKA', 'PASM' : 'ST MARY\'S, ALASKA', 'PASN' : 'ST PAUL ISLAND, ALASKA', 'PASO' : 'SELDOVIA, ALASKA', 'PASP' : 'SHEEP MOUNTAIN, ALASKA', 'PASV' : 'SPARREVOHN, ALASKA', 'PASW' : 'SKWENTNA, ALASKA', 'PASX' : 'SOLDOTNA, ALASKA', 'PASY' : 'SHEMYA, ALASKA', 'PATA' : 'TANANA, ALASKA', 'PATC' : 'TIN CITY, ALASKA', 'PATE' : 'TELLER, ALASKA', 'PATG' : 'TOGIAK VILLAGE, ALASKA', 'PATJ' : 'TOK, ALASKA', 'PATK' : 'TALKEETNA, ALASKA', 'PATL' : 'TATALINA, ALASKA', 'PATO' : 'WHITTIER, ALASKA', 'PATW' : 'CANTWELL, ALASKA', 'PAUM' : 'UMIAT, ALASKA', 'PAUN' : 'UNALAKLEET, ALASKA', 'PAUO' : 'WILLOW, ALASKA', 'PAVD' : 'VALDEZ, ALASKA', 'PAVL' : 'KIVALINA, ALASKA', 'PAVW' : 'VALDEZ, ALASKA', 'PAWD' : 'SEWARD, ALASKA', 'PAWG' : 'WRANGELL, ALASKA', 'PAWI' : 'WAINWRIGHT, ALASKA', 'PAWN' : 'NOATAK, ALASKA', 'PAWR' : 'WHITTIER, ALASKA', 'PAWS' : 'WASILLA, ALASKA', 'PAXK' : 'PAXSON, ALASKA', 'PAYA' : 'YAKUTAT, ALASKA', 'PAZK' : 'EUREKA, ALASKA', 'PFYU' : 'FORT YUKON, ALASKA', 'PGRO' : 'ROTA ISLAND, NORTH MARIANA ISLANDS', 'PGSN' : 'SAIPAN ISLAND, NORTH MARIANA ISLANDS', 'PGUM' : 'GUAM, GUAM', 'PGWT' : 'TINIAN ISLAND, NORTH MARIANA ISLANDS', 'PHBK' : 'KEKAHA, HAWAII', 'PHFO' : 'HONOLULU, HAWAII', 'PHHI' : 'WAHIAWA, HAWAII', 'PHHN' : 'HANA, HAWAII', 'PHIK' : 'HONOLULU, HAWAII', 'PHJH' : 'LAHAINA, HAWAII', 'PHJR' : 'KAPOLEI, HAWAII', 'PHKO' : 'KAILUA, HAWAII', 'PHLI' : 'LIHUE, HAWAII', 'PHMK' : 'KAUNAKAKAI, HAWAII', 'PHMO' : 'MOLOKAI, HAWAII', 'PHMU' : 'KAMUELA, HAWAII', 'PHNG' : 'KANEOHE, HAWAII', 'PHNL' : 'HONOLULU, HAWAII', 'PHNY' : 'LANAI 
CITY, HAWAII', 'PHOG' : 'KAHULUI, HAWAII', 'PHSF' : 'POHAKULOA, HAWAII', 'PHTO' : 'HILO, HAWAII', 'PHWH' : 'SOUTH KONA, HAWAII', 'PMDY' : 'MIDWAY ATOLL, MIDWAY ATOLL', 'POLI' : 'OLIKTOK POINT, ALASKA', 'PPIZ' : 'POINT LAY, ALASKA', }
cuppa-joe/dsame
defs.py
Python
isc
274,490
0.029815
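The table above is raw data rather than explanatory prose, but a short lookup shows how such a station-code table is typically consumed. This is a minimal sketch under assumptions: the dict's actual name inside defs.py is not visible in this excerpt, so ICAO_LIST below is a hypothetical stand-in populated with two entries copied from the table, and describe_station is an illustrative helper, not part of dsame's API.

# Hypothetical stand-in for the full code->location dict in defs.py.
ICAO_LIST = {
    'PANC': 'ANCHORAGE, ALASKA',
    'PHNL': 'HONOLULU, HAWAII',
}

def describe_station(icao_code):
    # Normalize case and fall back to the raw code for unknown stations.
    return ICAO_LIST.get(icao_code.upper(), icao_code)

print(describe_station('phnl'))  # -> HONOLULU, HAWAII
print(describe_station('xxxx'))  # -> xxxx (unknown code passed through)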
# ==============================================================================
# Copyright (C) 2011 Diego Duclos
# Copyright (C) 2011-2018 Anton Vorobyov
#
# This file is part of Eos.
#
# Eos is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Eos is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Eos. If not, see <http://www.gnu.org/licenses/>.
# ==============================================================================


from abc import ABCMeta
from abc import abstractmethod
from collections import namedtuple

from eos.const.eos import Restriction
from eos.const.eve import AttrId
from eos.restriction.exception import RestrictionValidationError
from .base import BaseRestriction


ResourceErrorData = namedtuple(
    'ResourceErrorData', ('total_use', 'output', 'item_use'))


class ResourceRestriction(BaseRestriction, metaclass=ABCMeta):
    """Base class for all resource restrictions.

    Resources in this context are something produced by the ship/character
    and consumed by other items.
    """

    def __init__(self, fit):
        self.__fit = fit

    @property
    @abstractmethod
    def _stat_name(self):
        """This name is used to fetch numbers from the stats service."""
        ...

    @property
    @abstractmethod
    def _use_attr_id(self):
        ...

    def validate(self):
        # Use the stats module to get resource use and output
        stats = getattr(self.__fit.stats, self._stat_name)
        total_use = stats.used
        # Can be None, so fall back to 0 in this case
        output = stats.output or 0
        # If we're not out of resource, do nothing
        if total_use <= output:
            return
        tainted_items = {}
        for item in stats._users:
            resource_use = item.attrs[self._use_attr_id]
            # Ignore items which do not actually consume the resource
            if resource_use <= 0:
                continue
            tainted_items[item] = ResourceErrorData(
                total_use=total_use,
                output=output,
                item_use=resource_use)
        raise RestrictionValidationError(tainted_items)


class CpuRestriction(ResourceRestriction):
    """CPU use by items should not exceed ship CPU output.

    Details:
        For validation, stats module data is used.
    """

    type = Restriction.cpu
    _stat_name = 'cpu'
    _use_attr_id = AttrId.cpu


class PowergridRestriction(ResourceRestriction):
    """Power grid use by items should not exceed ship power grid output.

    Details:
        For validation, stats module data is used.
    """

    type = Restriction.powergrid
    _stat_name = 'powergrid'
    _use_attr_id = AttrId.power


class CalibrationRestriction(ResourceRestriction):
    """Calibration use by items should not exceed ship calibration output.

    Details:
        For validation, stats module data is used.
    """

    type = Restriction.calibration
    _stat_name = 'calibration'
    _use_attr_id = AttrId.upgrade_cost


class DroneBayVolumeRestriction(ResourceRestriction):
    """Drone bay volume use by items should not exceed ship drone bay volume.

    Details:
        For validation, stats module data is used.
    """

    type = Restriction.dronebay_volume
    _stat_name = 'dronebay'
    _use_attr_id = AttrId.volume


class DroneBandwidthRestriction(ResourceRestriction):
    """Drone bandwidth use by items should not exceed ship drone bandwidth.

    Details:
        For validation, stats module data is used.
    """

    type = Restriction.drone_bandwidth
    _stat_name = 'drone_bandwidth'
    _use_attr_id = AttrId.drone_bandwidth_used
pyfa-org/eos
eos/restriction/restriction/resource.py
Python
lgpl-3.0
4,071
0
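The module above is a clean example of the template-method pattern: the abstract base class owns the whole validate() algorithm, and each concrete restriction supplies only a stat name and an attribute id. The following self-contained sketch mirrors that flow without importing Eos, so every Fake* name, validate_cpu, and the 'cpu' attribute key are illustrative stand-ins rather than Eos API; only ResourceErrorData and the validation control flow are copied from the module above.

from collections import namedtuple

ResourceErrorData = namedtuple(
    'ResourceErrorData', ('total_use', 'output', 'item_use'))


class RestrictionValidationError(Exception):
    """Stand-in for eos.restriction.exception.RestrictionValidationError."""


class FakeItem:
    """Toy item exposing the attrs mapping that validate() reads."""

    def __init__(self, name, attrs):
        self.name = name
        self.attrs = attrs


class FakeCpuStats:
    """Toy stand-in for the object behind fit.stats.cpu."""

    def __init__(self, users, output):
        self._users = users
        self.output = output

    @property
    def used(self):
        return sum(item.attrs.get('cpu', 0) for item in self._users)


def validate_cpu(stats):
    # Same control flow as ResourceRestriction.validate() above.
    total_use = stats.used
    output = stats.output or 0  # output may be None, treat as 0
    if total_use <= output:
        return  # within budget, nothing to report
    tainted_items = {}
    for item in stats._users:
        resource_use = item.attrs['cpu']
        if resource_use <= 0:  # items consuming nothing are never blamed
            continue
        tainted_items[item] = ResourceErrorData(
            total_use=total_use, output=output, item_use=resource_use)
    raise RestrictionValidationError(tainted_items)


items = [FakeItem('gun', {'cpu': 30}), FakeItem('rig', {'cpu': 0})]
try:
    validate_cpu(FakeCpuStats(items, output=25))
except RestrictionValidationError as exc:
    for item, error in exc.args[0].items():
        print(item.name, error)
# gun ResourceErrorData(total_use=30, output=25, item_use=30)

Running the sketch reports only 'gun': the zero-use 'rig' is skipped even though the fit as a whole is over budget, which is exactly the "do not blame items that consume nothing" rule in validate().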