Dataset Viewer
blob_id
stringlengths 40
40
| directory_id
stringlengths 40
40
| path
stringlengths 3
616
| content_id
stringlengths 40
40
| detected_licenses
sequencelengths 0
112
| license_type
stringclasses 2
values | repo_name
stringlengths 5
115
| snapshot_id
stringlengths 40
40
| revision_id
stringlengths 40
40
| branch_name
stringclasses 777
values | visit_date
timestamp[us]date 2015-08-06 10:31:46
2023-09-06 10:44:38
| revision_date
timestamp[us]date 1970-01-01 02:38:32
2037-05-03 13:00:00
| committer_date
timestamp[us]date 1970-01-01 02:38:32
2023-09-06 01:08:06
| github_id
int64 4.92k
681M
⌀ | star_events_count
int64 0
209k
| fork_events_count
int64 0
110k
| gha_license_id
stringclasses 22
values | gha_event_created_at
timestamp[us]date 2012-06-04 01:52:49
2023-09-14 21:59:50
⌀ | gha_created_at
timestamp[us]date 2008-05-22 07:58:19
2023-08-21 12:35:19
⌀ | gha_language
stringclasses 149
values | src_encoding
stringclasses 26
values | language
stringclasses 1
value | is_vendor
bool 2
classes | is_generated
bool 2
classes | length_bytes
int64 3
10.2M
| extension
stringclasses 188
values | content
stringlengths 3
10.2M
| authors
sequencelengths 1
1
| author_id
stringlengths 1
132
|
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
8163546f2fecc94e106eccfc43d3314cb39e1bbd | 26594819e61d1a5f290bb579c5326adbfcce0373 | /training/config.py | 25dc1f6c9a640b11046d9c9ff42d75afc367f761 | [] | no_license | overminder/fyp | 50ba90987fbfc5788d4021d943eebb2027adea45 | a9fe79a5a04589ee1866981c68ff8404cc7efeba | refs/heads/master | 2021-01-23T06:26:37.631000 | 2012-05-15T07:19:32 | 2012-05-15T07:19:32 | 1,816,661 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 94 | py | from util import local_path
def get_database_path():
    """Return the path of the sqlite store backing the training data.

    NOTE(review): resolution is delegated to util.local_path -- presumably
    relative to the package directory; confirm against util.
    """
    db_filename = 'store.sqlite3'
    return local_path(db_filename)
| [
"p90eri@gmail.com"
] | p90eri@gmail.com |
af5ee455cb7393efd56233ca1556032ce3b6435c | 4c68778814b938d91d184749b50940549439c0f3 | /scheme/fields/time.py | fe6e0bb58b391be8c8074c6fe7792ac82fede471 | [
"LicenseRef-scancode-warranty-disclaimer",
"BSD-3-Clause",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | jordanm/scheme | 96a747258ce68de756ffe7996b37c3e8747a740c | 5a87e24b35bb2f80b474273bf2e5c5fd563214e0 | refs/heads/master | 2021-01-17T05:48:51.479000 | 2020-01-20T16:03:28 | 2020-01-20T16:03:28 | 32,604,302 | 8 | 4 | NOASSERTION | 2020-01-20T16:03:29 | 2015-03-20T20:05:12 | Python | UTF-8 | Python | false | false | 3,174 | py | from __future__ import absolute_import
from datetime import time
from time import strptime
from scheme.exceptions import *
from scheme.field import *
__all__ = ('Time',)
class Time(Field):
    """A field for time values.

    Values are ``datetime.time`` instances; the serialized form is an
    ``'HH:MM:SS'`` string (see ``pattern``).  Optional ``minimum`` and
    ``maximum`` bounds are enforced during validation.
    """

    # Scheme metadata: serialized base type and the equivalent Python type.
    basetype = 'time'
    equivalent = time
    # Declared parameters and their defaults.
    # NOTE(review): presumed to be consumed by the Field base class -- confirm.
    parameters = {'maximum': None, 'minimum': None}
    # strftime/strptime format used for (un)serialization of values and bounds.
    pattern = '%H:%M:%S'

    errors = [
        FieldError('invalid', 'invalid value', '%(field)s must be a time value'),
        FieldError('minimum', 'minimum value', '%(field)s must not occur before %(minimum)s'),
        FieldError('maximum', 'maximum value', '%(field)s must not occur after %(maximum)s'),
    ]

    def __init__(self, minimum=None, maximum=None, **params):
        """Initialize the field.

        ``minimum``/``maximum`` may each be None, a ``datetime.time``, or an
        ``'HH:MM:SS'`` string; strings are converted here.  Anything else
        raises ``TypeError``.
        """
        super(Time, self).__init__(**params)
        if maximum is not None:
            try:
                maximum = self._unserialize_value(maximum)
            except InvalidTypeError:
                raise TypeError("argument 'maximum' must be either None, a datetime.time,"
                    " or a string in the format 'HH:MM:SS'")
        if minimum is not None:
            try:
                minimum = self._unserialize_value(minimum)
            except InvalidTypeError:
                raise TypeError("argument 'minimum' must be either None, a datetime.time,"
                    " or a string in the format 'HH:MM:SS'")

        self.maximum = maximum
        self.minimum = minimum

    def __repr__(self):
        """Include the configured bounds in the field's repr."""
        aspects = []
        if self.minimum is not None:
            aspects.append('minimum=%r' % self.minimum)
        if self.maximum is not None:
            aspects.append('maximum=%r' % self.maximum)
        return super(Time, self).__repr__(aspects)

    def describe(self, parameters=None, verbose=False):
        """Describe this field, serializing bounds to 'HH:MM:SS' strings."""
        params = {}
        if self.maximum is not None:
            params['maximum'] = self.maximum.strftime(self.pattern)
        if self.minimum is not None:
            params['minimum'] = self.minimum.strftime(self.pattern)
        return super(Time, self).describe(parameters=parameters, verbose=verbose, **params)

    def _serialize_value(self, value):
        """Serialize a datetime.time to its 'HH:MM:SS' representation."""
        return value.strftime(self.pattern)

    def _unserialize_value(self, value, ancestry=None):
        """Return ``value`` as a datetime.time.

        Times pass through unchanged; strings are parsed with ``pattern``.
        Any parse failure raises InvalidTypeError (error token 'invalid').
        """
        if isinstance(value, time):
            return value
        try:
            # strptime yields a struct_time; slots 3:6 are hour/minute/second.
            return time(*strptime(value, self.pattern)[3:6])
        except Exception:
            raise InvalidTypeError(identity=ancestry, field=self,
                value=value).construct('invalid')

    def _validate_value(self, value, ancestry):
        """Check the value's type and the optional minimum/maximum bounds."""
        if not isinstance(value, time):
            raise InvalidTypeError(identity=ancestry, field=self,
                value=value).construct('invalid')

        minimum = self.minimum
        if minimum is not None and value < minimum:
            raise ValidationError(identity=ancestry, field=self, value=value).construct('minimum',
                minimum=minimum.strftime(self.pattern))

        maximum = self.maximum
        if maximum is not None and value > maximum:
            raise ValidationError(identity=ancestry, field=self, value=value).construct('maximum',
                maximum=maximum.strftime(self.pattern))
| [
"mccoy.jordan@gmail.com"
] | mccoy.jordan@gmail.com |
6885e4c483c0399abfd20154156beeadf8b508af | d048a865519b5f944e1430c6181d00399c979d9c | /gallery/gallery/urls.py | f765f9c8680d91d33f2171654643694a2b0f21ad | [] | no_license | jithinvijayan007/PaceWisdom- | 5f84261c4ba7f51e25c8c21074b48214a24cb6d2 | 1ba00814a757edb327923afcaf20fe04652efa0e | refs/heads/master | 2023-03-06T04:00:21.729000 | 2021-02-21T18:56:54 | 2021-02-21T18:56:54 | 340,974,786 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 835 | py | """gallery URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/3.0/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path, include
urlpatterns = [
    path('admin/', admin.site.urls),        # Django admin site
    path('', include('user.urls')),         # routes from the "user" app
    path('', include('img_gallery.urls')),  # routes from the "img_gallery" app
]
| [
"jithinvijayan007@gmail.com"
] | jithinvijayan007@gmail.com |
48cd42cf70cd98648276cce423fd29d9850f9d0a | f2ab8ccda7203dd37d61facb9978cf74b781c7f1 | /tests/apps.py | 863cf58e139c91b4d865bed2d8a46b94a061f588 | [
"MIT"
] | permissive | Apkawa/easy-thumbnails-admin | 1991137224dcd117520b2c114d4012daf803776e | 9d7a38f215cdac53a663b00f1d4ff3a3c2a54eb4 | refs/heads/master | 2021-01-01T15:47:34.334000 | 2017-11-23T10:38:09 | 2017-11-23T10:38:09 | 97,703,157 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 274 | py | try:
from django.apps import AppConfig
except ImportError:
# Early Django versions import everything in test, avoid the failure due to
# AppConfig only existing in 1.7+
AppConfig = object
class TestConfig(AppConfig):
name = 'tests'
label = 'tests'
| [
"apkawa@gmail.com"
] | apkawa@gmail.com |
eed58a6b703faab6b504f4b3a66b4de43ae04f0a | e75521f26a9a6fdbd0b9dbe396b14a5f3c1af305 | /src/repositories/word_classifier_repository.py | 10cf90739a261923161b283cb2b1127ab1de82cd | [] | no_license | Ap3lsin4k/words-as-part-of-speech | 2636edb87d309d44d3d18add14aadd13f7810507 | e7f35d56d65a8f5033498f650265cadbd742a9de | refs/heads/master | 2023-01-31T19:01:11.007000 | 2020-12-15T10:57:20 | 2020-12-15T10:57:20 | 320,807,979 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,258 | py | from bookmark_entity import Bookmark
from language_entity import LanguageEntity
from repositories.dictionary_surfer_common import DictionarySurferRepository
class WordClassifierRepository(DictionarySurferRepository):
    """Looks up which grammatical properties of a part of speech match a word."""

    def __init__(self, dictionary_entity: LanguageEntity):
        super().__init__(dictionary_entity)

    def make_response_model(self, part_of_speech, input_word):
        """Fill ``self.result`` with every matching property of ``input_word``
        under ``part_of_speech``; set it to None when nothing matched."""
        self.result = {part_of_speech: {}}
        for category, _properties in self.dictionary[part_of_speech].items():
            bookmark = Bookmark(part_of_speech, category)
            self.__classify_word_by_property(bookmark, input_word)
        if not self.result[part_of_speech]:
            self.result = None

    def __save_property_of_word_to_presentable_format(self, bookmark):
        # Record the matched property under its category for this part of speech.
        entry = self.result[bookmark.get_part_of_speech()]
        entry.update({bookmark.category_name: bookmark.property_name})

    def __classify_word_by_property(self, bookmark, input_word):
        # Try each property of the bookmark's category; keep those whose word
        # list contains the input word.
        for candidate in self.dictionary.get_properties(bookmark):
            bookmark.property_name = candidate
            words_tuple = self.dictionary.get_words_for_property(bookmark)
            if input_word in words_tuple:
                self.__save_property_of_word_to_presentable_format(bookmark)
"andrii.fedorko01@gmail.com"
] | andrii.fedorko01@gmail.com |
f6fece3b5719a65008ae0fbe700a817b469a7a51 | e7eff96df8160d3c238bf38068c99c7b8bd3005b | /norman/web/frontend/crops.py | 08fa8b6415e718d05231de41cdbcfc0273dddb39 | [] | no_license | sumansai14/norman | 62c3760b47f15bb474786ac045efad5aff757b95 | 43a8c4e53830d57eb552c3ecb98bf2926c9d0457 | refs/heads/master | 2021-03-16T07:57:17.076000 | 2017-05-23T07:36:37 | 2017-05-23T07:36:37 | 92,188,183 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 170 | py | from norman.web.frontend.base import BaseAuthTemplateView
class OrganizationCropsListView(BaseAuthTemplateView):
    """Template view rendering the organization's crop-list page.

    NOTE(review): presumably auth-guarded via BaseAuthTemplateView --
    confirm in norman.web.frontend.base.
    """
    # Template rendered by the view machinery of the base class.
    template_name = 'norman/organization/crops_list.html'
| [
"suman.sai14@gmail.com"
] | suman.sai14@gmail.com |
2cf1cde00eea109a46c3e5983b4906feef72866f | f0856e60a095ce99ec3497b3f27567803056ac60 | /keras2/keras66_gradient2.py | 0e0d0cc1f27912ef32b11753f760a7606dd315f8 | [] | no_license | hjuju/TF_Study-HAN | dcbac17ce8b8885f5fb7d7f554230c2948fda9ac | c0faf98380e7f220868ddf83a9aaacaa4ebd2c2a | refs/heads/main | 2023-09-04T09:13:33.212000 | 2021-10-27T08:00:49 | 2021-10-27T08:00:49 | 384,371,952 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 479 | py | import numpy as np
import matplotlib.pyplot as plt
f = lambda x: x**2 - 4 * x + 6
gradient = lambda x: 2*x - 4 # f 미분 -> 미분한 값이 0이 되는 지점이 가장 낮은지점 -> 우리가 찾는 지점
x0 = 0.0
MaxIter = 20
learning_rate = 0.25
print("step\tx\tf(x)")
print("{:02d}\t{:6.5f}\t{:6.5f}".format(0, x0, f(x0)))
for i in range(MaxIter):
x1 = x0 - learning_rate * gradient(x0)
x0 = x1
print("{:02d}\t{:6.5f}\t{:6.5f}".format(i+1, x0, f(x0)))
| [
"tkackeowjs@naver.com"
] | tkackeowjs@naver.com |
fb2c64c0218df858e821204c4c485f29f4b33c74 | e0527bce5c53a196752d3a16adf50cb60754de5f | /10-How to Stop Programs Crashing Demos/3-is_square.py | 8bf01fcece7fa35279f95d25ece62fa140398965 | [] | no_license | ARWA-ALraddadi/python-tutorial-for-beginners | ddeb657f419fbc176bea273bc9fb6b88d1894191 | 21cedfc47871ca4d25c2382464c60ab0a2121205 | refs/heads/master | 2023-06-30T20:24:30.688000 | 2021-08-08T08:22:29 | 2021-08-08T08:22:29 | 193,094,651 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,066 | py | ################################################################
##
## As a demonstration of a function which applies defensive
## programming in different ways, consider a predicate
## which is intended to return True if a given natural
## number (i.e., a non-negative integer) is a square of
## another natural number.
##
## From this description the function could be "misused" in
## three ways:
##
## 1) It could be given a negative number.
## 2) It could be given a floating point number.
## 3) It could be given a value which is not a number at
## all.
##
## By adding some "defensive" code we can make a naive
## implementation more robust by responding appropriately
## to each of these cases:
##
## 1) A negative number can never be a square of another
## number, so we can always return False in this case.
## Here we choose to do so "silently", not drawing
## attention to the unexpected value at all, since the
## answer returned is still "correct" mathematically.
## 2) A positive floating point number could be a square of
## a natural number so, even though we're not required
## to handle floating point numbers we can still do so,
## but choose to generate a "warning" message in this
## case.
## 3) If the function is given a non-numerical value it
## is reasonable to assume that something is seriously
## wrong with the calling code, so in this case we
## generate an "error" message and return the special
## value None.
#---------------------------------------------------------
# Return True if the given natural number is the square of
# some other natural number
def is_square(natural_number):
    """Predicate: is the given natural number a perfect square?"""
    from math import sqrt

    # Three "defensive" checks follow

##    # Check that the parameter is a number
##    if not (isinstance(natural_number, int) or isinstance(natural_number, float)):
##        print('ERROR - parameter must be numeric, given:', repr(natural_number))
##        return None
##
##    # Check that the parameter is positive
##    if natural_number < 0:
##        return False
##
##    # Check that the parameter is a natural number
##    if isinstance(natural_number, float):
##        print('Warning - expected natural, given float:', natural_number)

    # A natural number is a square exactly when its square root is whole
    root = sqrt(natural_number)
    return root.is_integer()
#---------------------------------------------------------
# Some tests
#
# The first of these tests is a "valid" one, but the remaining
# three all provide unexpected inputs. Uncommenting the
# "defensive" checks above will cause the function to respond
# appropriately. (It will crash until the defensive code is
# uncommented. Why?)
# NB: with the defensive checks still commented out, the is_square(-1) call
# below raises ValueError (math domain error) and execution stops there;
# the 'August' call would raise TypeError.
print(is_square(36))  # expected input
print()
print(is_square(-1))  # unexpected input, but handled silently
print()
print(is_square(225.0))  # unexpected input, handled with warning
print()
print(is_square('August'))  # unexpected input, handled as an error
| [
"noreply@github.com"
] | ARWA-ALraddadi.noreply@github.com |
7bbfd94accf83c65ae4546356bccb460b15a900e | b8ea631aae5d132c7b0236684d5f7c12d3c222be | /Library/Graph/Dijkstra_heapq.py | 6164198b7fcd573492928ce2f82d98e051b23864 | [] | no_license | Ryushi-tech/card3 | 68c429313142e58d4722a1cd5a4acc4ab39ca41f | 883636b2f518e38343a12816c5c641b60a87c098 | refs/heads/master | 2021-07-05T22:46:33.089000 | 2020-12-12T15:31:00 | 2020-12-12T15:31:00 | 209,176,836 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 708 | py | import heapq
def dijkstra(s):
    """Single-source shortest paths from node ``s``.

    Reads the module-level adjacency list ``g`` (lists of (neighbor, weight)
    pairs) and fills the module-level ``dist`` table in place.
    """
    heap = []
    dist[s] = 0
    heapq.heappush(heap, (0, s))
    while heap:
        d, v = heapq.heappop(heap)
        if dist[v] < d:
            # Stale queue entry: a shorter route to v was already settled.
            continue
        for neighbor, weight in g[v]:
            candidate = dist[v] + weight
            if candidate < dist[neighbor]:
                dist[neighbor] = candidate
                heapq.heappush(heap, (dist[neighbor], neighbor))
# Read an undirected weighted graph with n nodes and n-1 edges (1-based input).
n = int(input())
g = [[] for _ in range(n)]
for _ in range(n - 1):
    a, b, c = map(int, input().split())
    a, b = a - 1, b - 1
    g[a].append((b, c))
    g[b].append((a, c))

inf = 10 ** 14  # sentinel larger than any feasible path cost
dist = [inf] * n

# m queries follow; k is the hub node (1-based) from which distances are taken.
m, k = map(int, input().split())
k = k - 1
dijkstra(k)

for _ in range(m):
    e, f = map(int, input().split())
    # Answer: distance from k to e plus distance from k to f.
    res = dist[e - 1] + dist[f - 1]
    print(res)
| [
"mryrys@gmail.com"
] | mryrys@gmail.com |
c7e2d80388cbe425136e01a06bdb2ea24fa604c6 | 9e988c0dfbea15cd23a3de860cb0c88c3dcdbd97 | /sdBs/AllRun/sdssj9-10_163557.64+341427.0/sdB_sdssj9-10_163557.64+341427.0_coadd.py | 39e21f206956741881cd664d37e0bb5ecdba667f | [] | no_license | tboudreaux/SummerSTScICode | 73b2e5839b10c0bf733808f4316d34be91c5a3bd | 4dd1ffbb09e0a599257d21872f9d62b5420028b0 | refs/heads/master | 2021-01-20T18:07:44.723000 | 2016-08-08T16:49:53 | 2016-08-08T16:49:53 | 65,221,159 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 498 | py | from gPhoton.gMap import gMap
def main():
    """Run gPhoton gMap once for this sdB target: build an NUV count movie
    in 30 s time bins plus a coadded count image at the fixed sky position,
    overwriting any existing output files."""
    gMap(band="NUV", skypos=[248.990167,34.240833], skyrange=[0.0333333333333,0.0333333333333], stepsz = 30., cntfile="/data2/fleming/GPHOTON_OUTPUT/LIGHTCURVES/sdBs/sdB_sdssj9-10_163557.64+341427.0/sdB_sdssj9-10_163557.64+341427.0_movie_count.fits", cntcoaddfile="/data2/fleming/GPHOTON_OUTPUT/LIGHTCURVES/sdB/sdB_sdssj9-10_163557.64+341427.0/sdB_sdssj9-10_163557.64+341427.0_count_coadd.fits", overwrite=True, verbose=3)

if __name__ == "__main__":
    main()
| [
"thomas@boudreauxmail.com"
] | thomas@boudreauxmail.com |
bde86714c9e9dcc484f3f18212f3921c491fe222 | e50ba4cc303d4165bef9e2917103c084cfbe0e07 | /rating_app/migrations/0016_auto_20201129_1156.py | 25f2b5ff3130d55f5d492b5c185861041cf00086 | [
"MIT"
] | permissive | Antony-me/Ratemyapp | 09049fce54d3a3ed2b256970e7840d20942e8c84 | e547fea82439a3e4f83aa78bf16f93b1ea9ab00b | refs/heads/main | 2023-01-28T16:52:58.635000 | 2020-12-01T16:49:07 | 2020-12-01T16:49:07 | 316,425,507 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 477 | py | # Generated by Django 3.1.3 on 2020-11-29 11:56
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Alter ProfileMerch.projects to a ForeignKey on rating_app.post
    with cascading delete."""

    dependencies = [
        ('rating_app', '0015_profilemerch'),
    ]

    operations = [
        migrations.AlterField(
            model_name='profilemerch',
            name='projects',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='rating_app.post'),
        ),
    ]
| [
"antonymunyasia993@gmail.com"
] | antonymunyasia993@gmail.com |
f82a7850addf3773f1ce92a89e4d51f96cf3f763 | 487ce91881032c1de16e35ed8bc187d6034205f7 | /codes/CodeJamCrawler/16_0_2_neat/16_0_2_tkdkop_pancake.py | 259ec04a68548d92ceed7f438162fc6b46baa760 | [] | no_license | DaHuO/Supergraph | 9cd26d8c5a081803015d93cf5f2674009e92ef7e | c88059dc66297af577ad2b8afa4e0ac0ad622915 | refs/heads/master | 2021-06-14T16:07:52.405000 | 2016-08-21T13:39:13 | 2016-08-21T13:39:13 | 49,829,508 | 2 | 0 | null | 2021-03-19T21:55:46 | 2016-01-17T18:23:00 | Python | UTF-8 | Python | false | false | 286 | py | #!/usr/bin/env python
import sys
import itertools
m = sys.stdin.readline()
i = 0
for line in sys.stdin.readlines():
line = line.strip()
i += 1
out_str = "Case #%d: " % i
line += '+'
k = itertools.groupby(line)
out_str += str(len(list(k))-1)
print out_str
| [
"[dhuo@tcd.ie]"
] | [dhuo@tcd.ie] |
4723c6f7c093e3989d133cebab10e0c13bf512a0 | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p03997/s926418877.py | acd277945016fcae9d48adcc8806653b1aeeec5f | [] | no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763000 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 58 | py | a,b,c,d=eval('['+'int(input()),'*3+'0]');print((a+b)*c//2) | [
"66529651+Aastha2104@users.noreply.github.com"
] | 66529651+Aastha2104@users.noreply.github.com |
cdaec89a7ecfa4ae8042bf31ac073b89b8a58072 | a3387fbcc918acb55d289ffb61b9fb603203dc11 | /Puzzles/2022-01/01_22_balanced_days.py | 924f5189761f280c72866b5565b743883fbda28e | [] | no_license | fbhs-cs/purdys-puzzles | 13e970ff909ff2e093b3b9d9777faac47c099913 | 1cf3f9c52677843fad781e46304e1485a91aae58 | refs/heads/master | 2023-08-17T06:28:06.659000 | 2023-08-09T14:45:43 | 2023-08-09T14:45:43 | 212,085,565 | 4 | 3 | null | null | null | null | UTF-8 | Python | false | false | 1,069 | py | from math import ceil
def is_balanced(num):
    """Return True if the digit sum of the first half of ``num`` equals the
    digit sum of the second half.

    For an odd digit count the middle digit belongs to both halves, so a
    single-digit number is always balanced.
    """
    digits = str(num)
    first = digits[:ceil(len(digits) / 2)]
    last = digits[len(digits) // 2:]
    # Direct comparison instead of an if/else returning True/False.
    return sum(int(d) for d in first) == sum(int(d) for d in last)
def count_balanced(n):
    """Count the balanced numbers in the range 1 .. n-1."""
    return sum(1 for value in range(1, n) if is_balanced(value))
def sum_balanced(n):
    """Sum of all balanced numbers in the range 1 .. n-1."""
    return sum(value for value in range(1, n) if is_balanced(value))
def find_balanced_dates(year=2022):
    """Print each date in ``year`` whose digit string M + D + YYYY forms a
    balanced number, then print how many there were and their total.

    Month and day are not zero-padded (e.g. 1/2/2022 -> "122022"), matching
    the original puzzle encoding.  February is always given 28 days, i.e.
    leap years are ignored.

    ``year`` defaults to 2022, preserving the original hard-coded behavior.
    """
    days_in_month = {1: 31, 2: 28, 3: 31, 4: 30, 5: 31, 6: 30,
                     7: 31, 8: 31, 9: 30, 10: 31, 11: 30, 12: 31}
    count = 0
    total = 0  # renamed from 'sum' to avoid shadowing the builtin
    for month in range(1, 13):
        for day in range(1, days_in_month[month] + 1):
            day_num = str(month) + str(day) + str(year)
            if is_balanced(int(day_num)):
                count += 1
                total += int(day_num)
                print(day_num)
    print(count)
    print(total)

find_balanced_dates()
| [
"cpurdy@flourbluffschools.net"
] | cpurdy@flourbluffschools.net |
808ac7632e66327e3f8d1fe634dab41d619f065e | 786de89be635eb21295070a6a3452f3a7fe6712c | /CorAna/tags/V00-00-04/src/ConfigParametersCorAna.py | 8baf5f326ca6758d621cc3f9f8cf43ac75c28720 | [] | no_license | connectthefuture/psdmrepo | 85267cfe8d54564f99e17035efe931077c8f7a37 | f32870a987a7493e7bf0f0a5c1712a5a030ef199 | refs/heads/master | 2021-01-13T03:26:35.494000 | 2015-09-03T22:22:11 | 2015-09-03T22:22:11 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 31,606 | py | #--------------------------------------------------------------------------
# File and Version Information:
# $Id$
#
# Description:
# Module ConfigParametersCorAna...
#
#------------------------------------------------------------------------
"""Is intended as a storage for configuration parameters for CorAna project.
This software was developed for the LCLS project. If you use all or
part of it, please give an appropriate acknowledgment.
@version $Id: template!python!py 4 2008-10-08 19:27:36Z salnikov $
@author Mikhail S. Dubrovin
"""
#------------------------------
# Module's version from CVS --
#------------------------------
__version__ = "$Revision: 4 $"
# $Source$
#--------------------------------
# Imports of standard modules --
#--------------------------------
import sys
import os
from copy import deepcopy
#-----------------------------
# Imports for other modules --
#-----------------------------
#import ConfigParameters as cpbase
from ConfigParameters import * # ConfigParameters
from Logger import logger
from PyQt4 import QtGui # for icons only...
import AppDataPath as apputils # for icons
#---------------------
# Class definition --
#---------------------
class ConfigParametersCorAna ( ConfigParameters ) :
"""Is intended as a storage for configuration parameters for CorAna project.
#@see BaseClass ConfigParameters
#@see OtherClass Parameters
"""
list_pars = []
    def __init__(self, fname=None):
        """Constructor: declare all parameters, then load any overrides.

        @param fname  configuration-parameters file name; when None the
                      defaults set at declaration remain in effect.
        """
        ConfigParameters.__init__(self)
        self.declareCorAnaParameters()
        self.readParametersFromFile(fname)
        self.initRunTimeParameters()
        self.defineStyles()
    def initRunTimeParameters(self):
        """Initialize session-only state that is never persisted to file."""
        self.char_expand = u' \u25BE'  # down-head triangle
        self.iconsAreLoaded = False    # lazy-load guard used by setIcons()
        self.plotarray_is_on = False
        self.plotg2_is_on = False
        self.autoRunStatus = 0         # 0=inactive, 1=split, 2=process, 3=merge
        #self.plotimgspe = None
        self.plotimgspe_g = None
#-----------------------------
    def setIcons(self):
        """Load all GUI icons from the CorAna application data area.

        Idempotent: the ``iconsAreLoaded`` flag makes repeated calls return
        immediately, so icons are resolved and loaded only once per session.
        """
        if self.iconsAreLoaded: return
        self.iconsAreLoaded = True

        # Resolve each icon file through the application data area.
        path_icon_contents = apputils.AppDataPath('CorAna/icons/contents.png').path()
        path_icon_mail_forward = apputils.AppDataPath('CorAna/icons/mail-forward.png').path()
        path_icon_button_ok = apputils.AppDataPath('CorAna/icons/button_ok.png').path()
        path_icon_button_cancel = apputils.AppDataPath('CorAna/icons/button_cancel.png').path()
        path_icon_exit = apputils.AppDataPath('CorAna/icons/exit.png').path()
        path_icon_home = apputils.AppDataPath('CorAna/icons/home.png').path()
        path_icon_redo = apputils.AppDataPath('CorAna/icons/redo.png').path()
        path_icon_undo = apputils.AppDataPath('CorAna/icons/undo.png').path()
        path_icon_reload = apputils.AppDataPath('CorAna/icons/reload.png').path()
        path_icon_save = apputils.AppDataPath('CorAna/icons/save.png').path()
        path_icon_save_cfg = apputils.AppDataPath('CorAna/icons/fileexport.png').path()
        path_icon_edit = apputils.AppDataPath('CorAna/icons/edit.png').path()
        path_icon_browser = apputils.AppDataPath('CorAna/icons/fileopen.png').path()
        path_icon_monitor = apputils.AppDataPath('CorAna/icons/icon-monitor.png').path()
        path_icon_unknown = apputils.AppDataPath('CorAna/icons/icon-unknown.png').path()
        path_icon_logviewer = apputils.AppDataPath('CorAna/icons/logviewer.png').path()
        path_icon_locked = apputils.AppDataPath('CorAna/icons/locked-icon.png').path()
        path_icon_unlocked = apputils.AppDataPath('CorAna/icons/unlocked-icon.png').path()

        # Build the QIcon objects used throughout the GUI.
        self.icon_contents = QtGui.QIcon(path_icon_contents)
        self.icon_mail_forward = QtGui.QIcon(path_icon_mail_forward)
        self.icon_button_ok = QtGui.QIcon(path_icon_button_ok)
        self.icon_button_cancel = QtGui.QIcon(path_icon_button_cancel)
        self.icon_exit = QtGui.QIcon(path_icon_exit)
        self.icon_home = QtGui.QIcon(path_icon_home)
        self.icon_redo = QtGui.QIcon(path_icon_redo)
        self.icon_undo = QtGui.QIcon(path_icon_undo)
        self.icon_reload = QtGui.QIcon(path_icon_reload)
        self.icon_save = QtGui.QIcon(path_icon_save)
        self.icon_save_cfg = QtGui.QIcon(path_icon_save_cfg)
        self.icon_edit = QtGui.QIcon(path_icon_edit)
        self.icon_browser = QtGui.QIcon(path_icon_browser)
        self.icon_monitor = QtGui.QIcon(path_icon_monitor)
        self.icon_unknown = QtGui.QIcon(path_icon_unknown)
        self.icon_logviewer = QtGui.QIcon(path_icon_logviewer)
        self.icon_lock = QtGui.QIcon(path_icon_locked)
        self.icon_unlock = QtGui.QIcon(path_icon_unlocked)

        # Legacy system-theme icon paths kept for reference.
        #base_dir = '/usr/share/icons/Bluecurve/24x24/'
        #self.icon_contents = QtGui.QIcon(base_dir + 'actions/contents.png')
        #self.icon_mail_forward = QtGui.QIcon(base_dir + '../../gnome/24x24/actions/mail-forward.png')
        #self.icon_button_ok = QtGui.QIcon(base_dir + 'actions/button_ok.png')
        #self.icon_button_cancel = QtGui.QIcon(base_dir + 'actions/button_cancel.png')
        #self.icon_exit = QtGui.QIcon(base_dir + 'actions/exit.png')
        #self.icon_home = QtGui.QIcon(base_dir + 'actions/gohome.png')
        #self.icon_redo = QtGui.QIcon(base_dir + 'actions/redo.png')
        #self.icon_undo = QtGui.QIcon(base_dir + 'actions/undo.png')
        #self.icon_reload = QtGui.QIcon(base_dir + 'actions/reload.png')
        #self.icon_stop = QtGui.QIcon(base_dir + 'actions/stop.png')
        #self.icon_save_cfg = QtGui.QIcon(base_dir + 'actions/fileexport.png')
        #self.icon_save = QtGui.QIcon(base_dir + 'stock/stock-save.png')
        #self.icon_edit = QtGui.QIcon(base_dir + 'actions/edit.png')
        #self.icon_browser = QtGui.QIcon(base_dir + 'actions/fileopen.png')
        #self.icon_monitor = QtGui.QIcon(base_dir + 'apps/icon-monitor.png')
        #self.icon_unknown = QtGui.QIcon(base_dir + 'apps/icon-unknown.png')
        #self.icon_logviewer = QtGui.QIcon(base_dir + '../32x32/apps/logviewer.png')

        # Aliases reusing already-loaded icons.
        self.icon_logger = self.icon_edit
        self.icon_help = self.icon_unknown
        self.icon_reset = self.icon_reload
#-----------------------------
def declareCorAnaParameters( self ) :
# Possible typs for declaration : 'str', 'int', 'long', 'float', 'bool'
# GUIInstrExpRun.py.py
# self.fname_cp = self.declareParameter( name='FNAME_CONFIG_PARS', val_def='confpars.txt', type='str' )
# self.fname_ped = self.declareParameter( name='FNAME_PEDESTALS', val_def='my_ped.txt', type='str' )
# self.fname_dat = self.declareParameter( name='FNAME_DATA', val_def='my_dat.txt', type='str' )
# self.instr_dir = self.declareParameter( name='INSTRUMENT_DIR', val_def='/reg/d/psdm', type='str' )
# self.instr_name = self.declareParameter( name='INSTRUMENT_NAME', val_def='XCS', type='str' )
# self.exp_name = self.declareParameter( name='EXPERIMENT_NAME', val_def='xcsi0112', type='str' )
# self.str_run_number = self.declareParameter( name='RUN_NUMBER', val_def='0015', type='str' )
# self.str_run_number_dark= self.declareParameter( name='RUN_NUMBER_DARK', val_def='0014', type='str' )
# GUIMainTB.py
# GUIMainSplit.py
self.current_tab = self.declareParameter( name='CURRENT_TAB' , val_def='Files', type='str' )
# GUILogger.py
self.log_level = self.declareParameter( name='LOG_LEVEL_OF_MSGS', val_def='info', type='str' )
# GUIFiles.py
self.current_file_tab = self.declareParameter( name='CURRENT_FILE_TAB' , val_def='Work/Results', type='str' )
# GUIRun.py
self.current_run_tab = self.declareParameter( name='CURRENT_RUN_TAB' , val_def='Input', type='str' )
# GUIWorkResDirs.py
self.dir_work = self.declareParameter( name='DIRECTORY_WORK', val_def='./work', type='str' )
self.dir_results = self.declareParameter( name='DIRECTORY_RESULTS', val_def='./results', type='str' )
self.fname_prefix = self.declareParameter( name='FILE_NAME_PREFIX', val_def='cora-', type='str' )
self.fname_prefix_cora = self.declareParameter( name='FILE_NAME_PREFIX_CORA', val_def='cora-proc', type='str' )
# GUIDark.py
self.use_dark_xtc_all = self.declareParameter( name='USE_DARK_XTC_ALL_CHUNKS', val_def=True, type='bool' )
self.in_dir_dark = self.declareParameter( name='IN_DIRECTORY_DARK', val_def='/reg/d/psdm/XCS/xcsi0112/xtc',type='str' )
self.in_file_dark = self.declareParameter( name='IN_FILE_NAME_DARK', val_def='e167-r0020-s00-c00.xtc',type='str' )
# GUIFlatField.py
self.ccdcorr_flatfield = self.declareParameter( name='CCD_CORRECTION_FLATFIELD', val_def=False, type='bool' )
self.dname_flat = self.declareParameter( name='DIRECTORY_FLAT', val_def='.',type='str' )
self.fname_flat = self.declareParameter( name='FILE_NAME_FLAT', val_def='flat_field.txt',type='str' )
#self.in_dir_flat = self.declareParameter( name='IN_DIRECTORY_FLAT', val_def='/reg/d/psdm/XCS/xcsi0112/xtc',type='str' )
#self.in_file_flat = self.declareParameter( name='IN_FILE_NAME_FLAT', val_def='e167-r0020-s00-c00.xtc',type='str' )
# GUIBlemish.py
self.ccdcorr_blemish = self.declareParameter( name='CCD_CORRECTION_BLEMISH', val_def=False, type='bool' )
self.dname_blem = self.declareParameter( name='DIRECTORY_BLEM', val_def='.',type='str' )
self.fname_blem = self.declareParameter( name='FILE_NAME_BLEM', val_def='blemish.txt',type='str' )
#self.in_dir_blem = self.declareParameter( name='IN_DIRECTORY_BLEM', val_def='/reg/d/psdm/XCS/xcsi0112/xtc',type='str' )
#self.in_file_blem = self.declareParameter( name='IN_FILE_NAME_BLEM', val_def='e167-r0020-s00-c00.xtc',type='str' )
# GUIData.py
self.use_data_xtc_all = self.declareParameter( name='USE_DATA_XTC_ALL_CHUNKS', val_def=True, type='bool' )
self.is_active_data_gui = self.declareParameter( name='IS_ACTIVE_DATA_GUI', val_def=True, type='bool' )
self.in_dir_data = self.declareParameter( name='IN_DIRECTORY_DATA', val_def='/reg/d/psdm/XCS/xcsi0112/xtc',type='str' )
self.in_file_data = self.declareParameter( name='IN_FILE_NAME_DATA', val_def='e167-r0020-s00-c00.xtc',type='str' )
# GUISetupBeamZero.py
self.x_coord_beam0 = self.declareParameter( name='X_COORDINATE_BEAM_ZERO', val_def=1234.5, type='float' )
self.y_coord_beam0 = self.declareParameter( name='Y_COORDINATE_BEAM_ZERO', val_def=1216.5, type='float' )
self.x0_pos_in_beam0 = self.declareParameter( name='X_CCD_POS_IN_BEAM_ZERO', val_def=-59, type='float' )
self.y0_pos_in_beam0 = self.declareParameter( name='Y_CCD_POS_IN_BEAM_ZERO', val_def=175, type='float' )
# GUISetupSpecular.py
self.x_coord_specular = self.declareParameter( name='X_COORDINATE_SPECULAR', val_def=-1, type='float' )
self.y_coord_specular = self.declareParameter( name='Y_COORDINATE_SPECULAR', val_def=-2, type='float' )
self.x0_pos_in_specular = self.declareParameter( name='X_CCD_POS_IN_SPECULAR', val_def=-3, type='float' )
self.y0_pos_in_specular = self.declareParameter( name='Y_CCD_POS_IN_SPECULAR', val_def=-4, type='float' )
# GUISetupData.py
self.x0_pos_in_data = self.declareParameter( name='X_CCD_POS_IN_DATA', val_def=-51, type='float' )
self.y0_pos_in_data = self.declareParameter( name='Y_CCD_POS_IN_DATA', val_def=183, type='float' )
# GUISetupInfoLeft.py
self.sample_det_dist = self.declareParameter( name='SAMPLE_TO_DETECTOR_DISTANCE', val_def=4000.1, type='float' )
self.exp_setup_geom = self.declareParameter( name='EXP_SETUP_GEOMETRY', val_def='Baem Zero', type='str' )
self.photon_energy = self.declareParameter( name='PHOTON_ENERGY', val_def=7.6543, type='float' )
self.nominal_angle = self.declareParameter( name='NOMINAL_ANGLE', val_def=-1, type='float' )
self.real_angle = self.declareParameter( name='REAL_ANGLE', val_def=-1, type='float' )
# GUIImgSizePosition.py
self.col_begin = self.declareParameter( name='IMG_COL_BEGIN', val_def=0, type='int' )
self.col_end = self.declareParameter( name='IMG_COL_END', val_def=1339, type='int' )
self.row_begin = self.declareParameter( name='IMG_ROW_BEGIN', val_def=1, type='int' )
self.row_end = self.declareParameter( name='IMG_ROW_END', val_def=1299, type='int' )
# GUIKineticMode.py
self.kin_mode = self.declareParameter( name='KINETICS_MODE', val_def='Non-Kinetics',type='str' )
self.kin_win_size = self.declareParameter( name='KINETICS_WIN_SIZE', val_def=1, type='int' )
self.kin_top_row = self.declareParameter( name='KINETICS_TOP_ROW', val_def=2, type='int' )
self.kin_slice_first = self.declareParameter( name='KINETICS_SLICE_FIRST', val_def=3, type='int' )
self.kin_slice_last = self.declareParameter( name='KINETICS_SLICE_LAST', val_def=4, type='int' )
# GUISetupPars.py
self.bat_num = self.declareParameter( name='BATCH_NUM', val_def= 1, type='int' )
self.bat_num_max = self.declareParameter( name='BATCH_NUM_MAX', val_def= 9, type='int' )
#self.bat_data_is_used = self.declareParameter( name='BATCH_DATA_IS_USED', val_def=True, type='bool' )
self.bat_data_start = self.declareParameter( name='BATCH_DATA_START', val_def= 1, type='int' )
self.bat_data_end = self.declareParameter( name='BATCH_DATA_END' , val_def=-1, type='int' )
self.bat_data_total = self.declareParameter( name='BATCH_DATA_TOTAL', val_def=-1, type='int' )
self.bat_data_time = self.declareParameter( name='BATCH_DATA_TIME' , val_def=-1.0, type='float' )
self.bat_data_dt_ave = self.declareParameter( name='BATCH_DATA_DT_AVE', val_def=-1.0, type='float' )
self.bat_data_dt_rms = self.declareParameter( name='BATCH_DATA_DT_RMS', val_def=0.0, type='float' )
self.bat_dark_is_used = self.declareParameter( name='BATCH_DARK_IS_USED', val_def=True, type='bool' )
self.bat_dark_start = self.declareParameter( name='BATCH_DARK_START', val_def= 1, type='int' )
self.bat_dark_end = self.declareParameter( name='BATCH_DARK_END' , val_def=-1, type='int' )
self.bat_dark_total = self.declareParameter( name='BATCH_DARK_TOTAL', val_def=-1, type='int' )
self.bat_dark_time = self.declareParameter( name='BATCH_DARK_TIME' , val_def=-1.0, type='float' )
self.bat_dark_dt_ave = self.declareParameter( name='BATCH_DARK_DT_AVE', val_def=-1.0, type='float' )
self.bat_dark_dt_rms = self.declareParameter( name='BATCH_DARK_DT_RMS', val_def=0.0, type='float' )
#self.bat_flat_is_used = self.declareParameter( name='BATCH_FLAT_IS_USED', val_def=True, type='bool' )
self.bat_flat_start = self.declareParameter( name='BATCH_FLAT_START', val_def= 1, type='int' )
self.bat_flat_end = self.declareParameter( name='BATCH_FLAT_END' , val_def=-1, type='int' )
self.bat_flat_total = self.declareParameter( name='BATCH_FLAT_TOTAL', val_def=-1, type='int' )
self.bat_flat_time = self.declareParameter( name='BATCH_FLAT_TIME' , val_def=-1.0, type='float' )
self.bat_queue = self.declareParameter( name='BATCH_QUEUE', val_def='psfehq', type='str' )
self.bat_det_info = self.declareParameter( name='BATCH_DET_INFO', val_def='DetInfo(:Princeton)', type='str' )
#self.bat_det_info = self.declareParameter( name='BATCH_DET_INFO', val_def='DetInfo(XcsBeamline.0:Princeton.0)', type='str' )
self.bat_img_rec_mod = self.declareParameter( name='BATCH_IMG_REC_MODULE', val_def='ImgAlgos.PrincetonImageProducer', type='str' )
# BatchLogParser.py
self.bat_img_rows = self.declareParameter( name='BATCH_IMG_ROWS', val_def= -1, type='int' )
self.bat_img_cols = self.declareParameter( name='BATCH_IMG_COLS', val_def= -1, type='int' )
self.bat_img_size = self.declareParameter( name='BATCH_IMG_SIZE', val_def= -1, type='int' )
self.bat_img_nparts = self.declareParameter( name='BATCH_IMG_NPARTS', val_def= 8, type='int' )
# GUIAnaSettingsLeft.py
self.ana_type = self.declareParameter( name='ANA_TYPE', val_def='Static',type='str' )
self.ana_stat_meth_q = self.declareParameter( name='ANA_STATIC_METHOD_Q', val_def='evenly-spaced',type='str' )
self.ana_stat_meth_phi = self.declareParameter( name='ANA_STATIC_METHOD_PHI', val_def='evenly-spaced',type='str' )
self.ana_dyna_meth_q = self.declareParameter( name='ANA_DYNAMIC_METHOD_Q', val_def='evenly-spaced',type='str' )
self.ana_dyna_meth_phi = self.declareParameter( name='ANA_DYNAMIC_METHOD_PHI', val_def='evenly-spaced',type='str' )
self.ana_stat_part_q = self.declareParameter( name='ANA_STATIC_PARTITION_Q', val_def='1',type='str' )
self.ana_stat_part_phi = self.declareParameter( name='ANA_STATIC_PARTITION_PHI', val_def='2',type='str' )
self.ana_dyna_part_q = self.declareParameter( name='ANA_DYNAMIC_PARTITION_Q', val_def='3',type='str' )
self.ana_dyna_part_phi = self.declareParameter( name='ANA_DYNAMIC_PARTITION_PHI', val_def='4',type='str' )
self.ana_mask_type = self.declareParameter( name='ANA_MASK_TYPE', val_def='no-mask',type='str' )
self.ana_mask_fname = self.declareParameter( name='ANA_MASK_FILE', val_def='./roi-mask.txt',type='str' )
self.ana_mask_dname = self.declareParameter( name='ANA_MASK_DIRECTORY', val_def='.',type='str' )
# GUIAnaSettingsRight.py
self.ana_ndelays = self.declareParameter( name='ANA_NDELAYS_PER_MTAU_LEVEL', val_def=4, type='int' )
self.ana_nslice_delays = self.declareParameter( name='ANA_NSLICE_DELAYS_PER_MTAU_LEVEL', val_def=4, type='int' )
self.ana_npix_to_smooth= self.declareParameter( name='ANA_NPIXELS_TO_SMOOTH', val_def=100, type='int' )
self.ana_smooth_norm = self.declareParameter( name='ANA_SMOOTH_SYM_NORM', val_def=False, type='bool' )
self.ana_two_corfuns = self.declareParameter( name='ANA_TWO_TIME_CORFUNS_CONTROL', val_def=False, type='bool' )
self.ana_spec_stab = self.declareParameter( name='ANA_CHECK_SPECKLE_STABILITY', val_def=False, type='bool' )
self.lld_type = self.declareParameter( name='LOW_LEVEL_DISC_TYPE', val_def='NONE',type='str' )
self.lld_adu = self.declareParameter( name='LOW_LEVEL_DISC_ADU', val_def=15, type='float' )
self.lld_rms = self.declareParameter( name='LOW_LEVEL_DISC_RMS', val_def=4, type='float' )
self.res_ascii_out = self.declareParameter( name='RES_ASCII_OUTPUT', val_def=True, type='bool' )
self.res_fit1 = self.declareParameter( name='RES_PERFORM_FIT1', val_def=False, type='bool' )
self.res_fit2 = self.declareParameter( name='RES_PERFORM_FIT1', val_def=False, type='bool' )
self.res_fit_cust = self.declareParameter( name='RES_PERFORM_FIT_CUSTOM', val_def=False, type='bool' )
self.res_png_out = self.declareParameter( name='RES_PNG_FILES', val_def=False, type='bool' )
self.res_save_log = self.declareParameter( name='RES_SAVE_LOG_FILE', val_def=False, type='bool' )
# GUILoadResults.py
self.res_load_mode = self.declareParameter( name='RES_LOAD_MODE', val_def='NONE',type='str' )
self.res_fname = self.declareParameter( name='RES_LOAD_FNAME', val_def='NONE',type='str' )
# GUISystemSettingsRight.py
self.thickness_type = self.declareParameter( name='THICKNESS_TYPE', val_def='NONORM',type='str' )
self.thickness_sample = self.declareParameter( name='THICKNESS_OF_SAMPLE', val_def=-1, type='float' )
self.thickness_attlen = self.declareParameter( name='THICKNESS_ATTENUATION_LENGTH', val_def=-2, type='float' )
self.ccd_orient = self.declareParameter( name='CCD_ORIENTATION', val_def='180', type='str' )
self.y_is_flip = self.declareParameter( name='Y_IS_FLIPPED', val_def='True', type='bool' )
# GUICCDSettings.py
self.ccdset_pixsize = self.declareParameter( name='CCD_SETTINGS_PIXEL_SIZE', val_def=0.1, type='float' )
self.ccdset_adcsatu = self.declareParameter( name='CCD_SETTINGS_ADC_SATTURATION', val_def=12345, type='int' )
self.ccdset_aduphot = self.declareParameter( name='CCD_SETTINGS_ADU_PER_PHOTON', val_def=123, type='float' )
self.ccdset_ccdeff = self.declareParameter( name='CCD_SETTINGS_EFFICIENCY', val_def=0.55, type='float' )
self.ccdset_ccdgain = self.declareParameter( name='CCD_SETTINGS_GAIN', val_def=0.8, type='float' )
# GUIELogPostingDialog.py
# GUIELogPostingFields.py
#self.elog_post_cbx_state = self.declareParameter( name='ELOG_POST_CBX_STATE', val_def=True, type='bool' )
self.elog_post_rad = self.declareParameter( name='ELOG_POST_RAD_STATE', val_def='Default', type='str' )
self.elog_post_ins = self.declareParameter( name='ELOG_POST_INSTRUMENT', val_def='AMO', type='str' )
self.elog_post_exp = self.declareParameter( name='ELOG_POST_EXPERIMENT', val_def='amodaq09', type='str' )
self.elog_post_run = self.declareParameter( name='ELOG_POST_RUN', val_def='825', type='str' )
self.elog_post_tag = self.declareParameter( name='ELOG_POST_TAG', val_def='TAG1', type='str' )
self.elog_post_res = self.declareParameter( name='ELOG_POST_RESPONCE', val_def='None', type='str' )
self.elog_post_msg = self.declareParameter( name='ELOG_POST_MESSAGE', val_def='EMPTY MSG', type='str' )
self.elog_post_att = self.declareParameter( name='ELOG_POST_ATTACHED_FILE', val_def='None', type='str' )
#GUIViewControl.py
self.vc_cbx_show_more = self.declareParameter( name='SHOW_MORE_BUTTONS', val_def=True, type='bool' )
#-----------------------------
imon_names = [ ('BldInfo(FEEGasDetEnergy)', None ,'str'), \
('BldInfo(XCS-IPM-02)', None ,'str'), \
('BldInfo(XCS-IPM-mono)', None ,'str'), \
('DetInfo(XcsBeamline.1:Ipimb.4)', None ,'str'), \
('DetInfo(XcsBeamline.1:Ipimb.5)', None ,'str') ]
self.imon_name_list = self.declareListOfPars( 'IMON_NAMES', imon_names )
#-----------------------------
imon_short_names = [ ('FEEGasDetEnergy', None ,'str'), \
('XCS-IPM-02', None ,'str'), \
('XCS-IPM-mono', None ,'str'), \
('Ipimb.4', None ,'str'), \
('Ipimb.5', None ,'str') ]
self.imon_short_name_list = self.declareListOfPars( 'IMON_SHORT_NAMES', imon_short_names )
#-----------------------------
imon_cbxs = [ (True, True ,'bool'), \
(True, True ,'bool'), \
(True, True ,'bool'), \
(True, True ,'bool'), \
(True, True ,'bool') ]
self.imon_ch1_list = self.declareListOfPars( 'IMON_CH1', deepcopy(imon_cbxs) )
self.imon_ch2_list = self.declareListOfPars( 'IMON_CH2', deepcopy(imon_cbxs) )
self.imon_ch3_list = self.declareListOfPars( 'IMON_CH3', deepcopy(imon_cbxs) )
self.imon_ch4_list = self.declareListOfPars( 'IMON_CH4', deepcopy(imon_cbxs) )
#-----------------------------
imon_norm_cbx = [ (False, False ,'bool'), \
(False, False ,'bool'), \
(False, False ,'bool'), \
(False, False ,'bool'), \
(False, False ,'bool') ]
self.imon_norm_cbx_list = self.declareListOfPars( 'IMON_NORM_CBX', imon_norm_cbx )
#-----------------------------
imon_sele_cbx = [ (False, False ,'bool'), \
(False, False ,'bool'), \
(False, False ,'bool'), \
(False, False ,'bool'), \
(False, False ,'bool') ]
self.imon_sele_cbx_list = self.declareListOfPars( 'IMON_SELE_CBX', imon_sele_cbx )
#-----------------------------
imon_sele_min = [ (-1., -1. ,'float'), \
(-1., -1. ,'float'), \
(-1., -1. ,'float'), \
(-1., -1. ,'float'), \
(-1., -1. ,'float') ]
self.imon_sele_min_list = self.declareListOfPars( 'IMON_SELE_MIN', imon_sele_min )
#-----------------------------
imon_sele_max = [ (-1., -1. ,'float'), \
(-1., -1. ,'float'), \
(-1., -1. ,'float'), \
(-1., -1. ,'float'), \
(-1., -1. ,'float') ]
self.imon_sele_max_list = self.declareListOfPars( 'IMON_SELE_MAX', imon_sele_max )
#-----------------------------
self.imon_pars_list = zip( self.imon_name_list,
self.imon_ch1_list,
self.imon_ch2_list,
self.imon_ch3_list,
self.imon_ch4_list,
self.imon_norm_cbx_list,
self.imon_sele_cbx_list,
self.imon_sele_min_list,
self.imon_sele_max_list,
self.imon_short_name_list )
#print self.imon_pars_list
#-----------------------------
def defineStyles( self ) :
self.styleYellowish = "background-color: rgb(255, 255, 220); color: rgb(0, 0, 0);" # Yellowish
self.stylePink = "background-color: rgb(255, 200, 220); color: rgb(0, 0, 0);" # Pinkish
self.styleYellowBkg = "background-color: rgb(255, 255, 120); color: rgb(0, 0, 0);" # Pinkish
self.styleGray = "background-color: rgb(230, 240, 230); color: rgb(0, 0, 0);" # Gray
self.styleGreenish = "background-color: rgb(100, 255, 200); color: rgb(0, 0, 0);" # Greenish
self.styleGreenPure = "background-color: rgb(150, 255, 150); color: rgb(0, 0, 0);" # Green
self.styleBluish = "background-color: rgb(200, 200, 255); color: rgb(0, 0, 0);" # Bluish
self.styleWhite = "background-color: rgb(255, 255, 255); color: rgb(0, 0, 0);"
self.styleRedBkgd = "background-color: rgb(255, 0, 0); color: rgb(0, 0, 0);" # Red background
#self.styleTitle = "background-color: rgb(239, 235, 231, 255); color: rgb(100, 160, 100);" # Gray bkgd
#self.styleTitle = "color: rgb(150, 160, 100);"
self.styleBlue = "color: rgb(000, 000, 255);"
self.styleBuriy = "color: rgb(150, 100, 50);"
self.styleRed = "color: rgb(255, 0, 0);"
self.styleGreen = "color: rgb(0, 150, 0);"
self.styleYellow = "color: rgb(0, 150, 150);"
self.styleBkgd = self.styleYellowish
self.styleTitle = self.styleBuriy
self.styleLabel = self.styleBlue
self.styleEdit = self.styleWhite
self.styleEditInfo = self.styleGreenish
self.styleEditBad = self.styleRedBkgd
self.styleButton = self.styleGray
self.styleButtonOn = self.styleBluish
self.styleButtonClose = self.stylePink
self.styleButtonWarning= self.styleYellowBkg
self.styleButtonGood = self.styleGreenPure
self.styleButtonBad = self.stylePink
self.styleBox = self.styleGray
self.styleCBox = self.styleYellowish
self.styleStatusGood = self.styleGreen
self.styleStatusWarning= self.styleYellow
self.styleStatusAlarm = self.styleRed
self.styleTitleBold = self.styleTitle + 'font-size: 18pt; font-family: Courier; font-weight: bold;'
self.styleWhiteFixed = self.styleWhite + 'font-family: Fixed;'
self.colorEditInfo = QtGui.QColor(100, 255, 200)
self.colorEditBad = QtGui.QColor(255, 0, 0)
self.colorEdit = QtGui.QColor('white')
def printParsDirectly( self ) :
logger.info('Direct use of parameter:' + self.fname_cp .name() + ' ' + self.fname_cp .value(), __name__ )
logger.info('Direct use of parameter:' + self.fname_ped.name() + ' ' + self.fname_ped.value(), __name__ )
logger.info('Direct use of parameter:' + self.fname_dat.name() + ' ' + self.fname_dat.value(), __name__ )
#-----------------------------
confpars = ConfigParametersCorAna (fname=getConfigFileFromInput())
#-----------------------------
#
# In case someone decides to run this module
#
if __name__ == "__main__" :
confpars.printParameters()
#confpars.printParsDirectly()
confpars.saveParametersInFile()
confpars.printListOfPars('IMON_NAMES')
sys.exit ( 'End of test for ConfigParametersCorAna' )
#-----------------------------
| [
"dubrovin@SLAC.STANFORD.EDU@b967ad99-d558-0410-b138-e0f6c56caec7"
] | dubrovin@SLAC.STANFORD.EDU@b967ad99-d558-0410-b138-e0f6c56caec7 |
9567422e1472a65046cf8160b1bdae8fbcf7dcd3 | 080c13cd91a073457bd9eddc2a3d13fc2e0e56ae | /MY_REPOS/awesome-4-new-developers/tensorflow-master/tensorflow/python/types/internal.py | c56c7aa6d7790b4c36d248603f2282e60af08a39 | [
"Apache-2.0"
] | permissive | Portfolio-Projects42/UsefulResourceRepo2.0 | 1dccc8961a09347f124d3ed7c27c6d73b9806189 | 75b1e23c757845b5f1894ebe53551a1cf759c6a3 | refs/heads/master | 2023-08-04T12:23:48.862000 | 2021-09-15T12:51:35 | 2021-09-15T12:51:35 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,129 | py | # Copyright 2020 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Types internal to TensorFlow.
These types should not be exported. External code should not rely on these.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# TODO(mdan): Is this strictly needed? Only ops.py really uses it.
class NativeObject(object):
"""Types natively supported by various TF operations.
The most notable example of NativeObject is Tensor.
"""
| [
"bryan.guner@gmail.com"
] | bryan.guner@gmail.com |
abc2e14c55f8110ca3d0bc1403c2b44d4e5fe36e | 026fee65b95206995baf1565f486ab4ed7f7cef9 | /userprofiles/admin.py | 89683d76fdacc00428bfbad69cc1e019d3f01b5e | [] | no_license | santhoshpkumar/pinclone | e8460aab355ebf3e5559d44127d7ccad22667747 | 8bf641df9a4999797731d1d2fb4ff3d78d717e10 | refs/heads/master | 2020-04-03T09:39:27.269000 | 2018-10-08T10:51:51 | 2018-10-08T10:51:51 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 225 | py | from django.contrib import admin
from .models import Profile
# Register your models here.
@admin.register(Profile)
class ProfileAdmin(admin.ModelAdmin):
list_display = ('user', 'bio', 'website', 'birth_date')
| [
"undefined.hlo.o@gmail.com"
] | undefined.hlo.o@gmail.com |
28e7dee0700c6fe42c004b939fcaa2b9ff69d27e | eb64b799ff1d7ef3a244bf8e6f9f4e9118d5cfcd | /homeassistant/components/trafikverket_weatherstation/const.py | 7bb53dc5356a0b8a392104982912658806275659 | [
"Apache-2.0"
] | permissive | JeffLIrion/home-assistant | 53966b81b5d5816679f12fc761f79e8777c738d6 | 8f4ec89be6c2505d8a59eee44de335abe308ac9f | refs/heads/dev | 2023-08-22T09:42:02.399000 | 2022-02-16T01:26:13 | 2022-02-16T01:26:13 | 136,679,169 | 5 | 2 | Apache-2.0 | 2023-09-13T06:59:25 | 2018-06-09T00:58:35 | Python | UTF-8 | Python | false | false | 466 | py | """Adds constants for Trafikverket Weather integration."""
from homeassistant.const import Platform
DOMAIN = "trafikverket_weatherstation"
CONF_STATION = "station"
PLATFORMS = [Platform.SENSOR]
ATTRIBUTION = "Data provided by Trafikverket"
ATTR_MEASURE_TIME = "measure_time"
ATTR_ACTIVE = "active"
NONE_IS_ZERO_SENSORS = {
"air_temp",
"road_temp",
"wind_direction",
"wind_speed",
"wind_speed_max",
"humidity",
"precipitation_amount",
}
| [
"noreply@github.com"
] | JeffLIrion.noreply@github.com |
7642072e77aebda4174a74cfe093db22e6377af7 | 7bd0954e956993df19d833810f9d71b60e2ebb9a | /phasor/utilities/ipynb/hdf.py | b9f7e5b1add89064ffd726859cfe27d4415619ec | [
"Apache-2.0"
] | permissive | aa158/phasor | 5ee0cec4f816b88b0a8ac298c330ed48458ec3f2 | fe86dc6dec3740d4b6be6b88d8eef8566e2aa78d | refs/heads/master | 2021-10-22T09:48:18.556000 | 2019-03-09T18:56:05 | 2019-03-09T18:56:05 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 169 | py | # -*- coding: utf-8 -*-
"""
"""
from __future__ import division, print_function, unicode_literals
import h5py
from declarative.bunch.hdf_deep_bunch import HDFDeepBunch
| [
"Lee.McCuller@gmail.com"
] | Lee.McCuller@gmail.com |
267f5e570bff6ec85a0e60de98259cea7422da0e | edb37da2fd2d2f048df119db96a6de58fc816ddb | /jumpserver-0.4/zrd/my_blog/article/views.py | 0634c5361e1cf968ac0e81b87ea55908e18fa6b5 | [] | no_license | cucy/2017 | 88f1aa2e8df945162d8259918cf61a138a3422cf | 33bcdd5c9e0717521544e3ea41ade10fbb325c4f | refs/heads/master | 2020-05-21T15:31:39.935000 | 2017-07-10T11:04:29 | 2017-07-10T11:04:29 | 84,629,639 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,766 | py | # coding:utf-8
from django.shortcuts import render
from django.shortcuts import render_to_response
# Create your views here.
from django.http import HttpResponse
from models import SSHInfo
# Create your views here.
try:
from ConfigParser import ConfigParser
except:
from configparser import ConfigParser
try:
import paramiko_client
except:
from . import paramiko_client
def home(request):
# 如果请求里有file
for key in request.FILES:
file = request.FILES[key]
config = ConfigParser() # 读取配置文件
config.readfp(file)
for section in config.sections():
print(section)
host_name = config.get(section, 'host_name')
host = config.get(section, 'host')
port = config.get(section, 'port')
usr = config.get(section, 'username')
pwd = config.get(section, 'password')
new_ssh, create = SSHInfo.objects.update_or_create(
host_name=host_name
, host=host
, port=port
, usr=usr
, pwd=pwd
)
new_ssh.save() # 保存配置信息到数据库
sshs = SSHInfo.objects.all() # 获取所有对象
if len(sshs) > 0:
return render_to_response('sshlist.html', {'sshs': sshs})
else:
return render_to_response('home_view.html')
def run_ssh_cmd(requset):
# 获取所有的信息
sshs = SSHInfo.objects.all()
cmd_res = {}
for ssh in sshs:
client = paramiko_client.ParamikoClient()
client.connect(ssh)
res = client.run_cmd('date') # 执行命令 接收返回
cmd_res[ssh.host_name] = res
return render_to_response('cmd_res.html', {'cmd_res': cmd_res})
| [
"292016176@qq.com"
] | 292016176@qq.com |
d3e3b20b1ce012f78bbc61c3eb7dc31075d016ca | c9094a4ed256260bc026514a00f93f0b09a5d60c | /tests/components/accuweather/test_system_health.py | 749f516e44c748caf05503460e8a72ec34d085d3 | [
"Apache-2.0"
] | permissive | turbokongen/home-assistant | 824bc4704906ec0057f3ebd6d92788e096431f56 | 4ab0151fb1cbefb31def23ba850e197da0a5027f | refs/heads/dev | 2023-03-12T05:49:44.508000 | 2021-02-17T14:06:16 | 2021-02-17T14:06:16 | 50,231,140 | 4 | 1 | Apache-2.0 | 2023-02-22T06:14:30 | 2016-01-23T08:55:09 | Python | UTF-8 | Python | false | false | 1,785 | py | """Test AccuWeather system health."""
import asyncio
from unittest.mock import Mock
from aiohttp import ClientError
from homeassistant.components.accuweather.const import COORDINATOR, DOMAIN
from homeassistant.setup import async_setup_component
from tests.common import get_system_health_info
async def test_accuweather_system_health(hass, aioclient_mock):
"""Test AccuWeather system health."""
aioclient_mock.get("https://dataservice.accuweather.com/", text="")
hass.config.components.add(DOMAIN)
assert await async_setup_component(hass, "system_health", {})
hass.data[DOMAIN] = {}
hass.data[DOMAIN]["0123xyz"] = {}
hass.data[DOMAIN]["0123xyz"][COORDINATOR] = Mock(
accuweather=Mock(requests_remaining="42")
)
info = await get_system_health_info(hass, DOMAIN)
for key, val in info.items():
if asyncio.iscoroutine(val):
info[key] = await val
assert info == {
"can_reach_server": "ok",
"remaining_requests": "42",
}
async def test_accuweather_system_health_fail(hass, aioclient_mock):
"""Test AccuWeather system health."""
aioclient_mock.get("https://dataservice.accuweather.com/", exc=ClientError)
hass.config.components.add(DOMAIN)
assert await async_setup_component(hass, "system_health", {})
hass.data[DOMAIN] = {}
hass.data[DOMAIN]["0123xyz"] = {}
hass.data[DOMAIN]["0123xyz"][COORDINATOR] = Mock(
accuweather=Mock(requests_remaining="0")
)
info = await get_system_health_info(hass, DOMAIN)
for key, val in info.items():
if asyncio.iscoroutine(val):
info[key] = await val
assert info == {
"can_reach_server": {"type": "failed", "error": "unreachable"},
"remaining_requests": "0",
}
| [
"noreply@github.com"
] | turbokongen.noreply@github.com |
1b32ea37e4c7f6126f63d235f5bc196330d2dc7e | d94b6845aeeb412aac6850b70e22628bc84d1d6d | /dimensions_of_motion/geometry.py | d7a317cb08a95e69785f8cd0af032ae5db8a1f29 | [
"CC-BY-4.0",
"Apache-2.0"
] | permissive | ishine/google-research | 541aea114a68ced68736340e037fc0f8257d1ea2 | c1ae273841592fce4c993bf35cdd0a6424e73da4 | refs/heads/master | 2023-06-08T23:02:25.502000 | 2023-05-31T01:00:56 | 2023-05-31T01:06:45 | 242,478,569 | 0 | 0 | Apache-2.0 | 2020-06-23T01:55:11 | 2020-02-23T07:59:42 | Jupyter Notebook | UTF-8 | Python | false | false | 7,466 | py | # coding=utf-8
# Copyright 2023 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# -*- coding: utf-8 -*-
"""Functions for sampling and warping images.
We use texture coordinates to represent points and offsets in images. They go
from (0,0) in the top-left corner of an image to (1,1) in the bottom right. It
is convenient to work with these coordinates rather than counts of pixels,
because they are resolution-independent.
"""
import tensorflow as tf
import tensorflow_addons as tfa
import utils
def check_input_shape(name, tensor, axis, value):
"""Utility function for checking tensor shapes."""
shape = tensor.shape.as_list()
if shape[axis] != value:
raise ValueError('Input "%s": dimension %d should be %s. Shape = %s' %
(name, axis, value, shape))
def pixel_center_grid(height, width):
"""Produce a grid of (x,y) texture-coordinate pairs of pixel centers.
Args:
height: (integer) height, not a tensor
width: (integer) width, not a tensor
Returns:
A tensor of shape [height, width, 2] where each entry gives the (x,y)
texture coordinates of the corresponding pixel center. For example, for
pixel_center_grid(2, 3) the result is:
[[[1/6, 1/4], [3/6, 1/4], [5/6, 1/4]],
[[1/6, 3/4], [3/6, 3/4], [5/6, 3/4]]]
"""
height_float = tf.cast(height, dtype=tf.float32)
width_float = tf.cast(width, dtype=tf.float32)
ys = tf.linspace(0.5 / height_float, 1.0 - 0.5 / height_float, height)
xs = tf.linspace(0.5 / width_float, 1.0 - 0.5 / width_float, width)
xs, ys = tf.meshgrid(xs, ys)
grid = tf.stack([xs, ys], axis=-1)
assert grid.shape.as_list() == [height, width, 2]
return grid
def sample_image(image, coords):
"""Sample points from an image, using bilinear filtering.
Args:
image: [B0, ..., Bn-1, height, width, channels] image data
coords: [B0, ..., Bn-1, ..., 2] (x,y) texture coordinates
Returns:
[B0, ..., Bn-1, ..., channels] image data, in which each value is sampled
with bilinear interpolation from the image at position indicated by the
(x,y) texture coordinates. The image and coords parameters must have
matching batch dimensions B0, ..., Bn-1.
Raises:
ValueError: if shapes are incompatible.
"""
check_input_shape('coords', coords, -1, 2)
tfshape = tf.shape(image)[-3:-1]
height = tf.cast(tfshape[0], dtype=tf.float32)
width = tf.cast(tfshape[1], dtype=tf.float32)
# Resampler expects coordinates where (0,0) is the center of the top-left
# pixel and (width-1, height-1) is the center of the bottom-right pixel.
pixel_coords = coords * [width, height] - 0.5
# tfa.image.resampler only works with exactly one batch dimension, i.e. it
# expects image to be [batch, height, width, channels] and pixel_coords to be
# [batch, ..., 2]. So we need to reshape, perform the resampling, and then
# reshape back to what we had.
batch_dims = len(image.shape.as_list()) - 3
assert (image.shape.as_list()[:batch_dims] == pixel_coords.shape.as_list()
[:batch_dims])
batched_image, _ = utils.flatten_batch(image, batch_dims)
batched_coords, unflatten_coords = utils.flatten_batch(
pixel_coords, batch_dims)
resampled = tfa.image.resampler(batched_image, batched_coords)
# Convert back to the right shape to return
resampled = unflatten_coords(resampled)
return resampled
def bilinear_forward_warp(image, coords, weights=None):
"""Forward warp each point in an image using bilinear filtering.
This is a sort of reverse of sample_image, in the sense that scatter is the
reverse of gather. A new image is generated of the same size as the input, in
which each pixel has been splatted onto the 2x2 block containing the
corresponding coordinates, using bilinear weights (multiplied with the input
per-pixel weights, if supplied). Thus if two or more pixels warp to the same
point, the result will be a blend of the their values. If no pixels warp to a
location, the result at that location will be zero.
Args:
image: [B0, ..., Bn-1, height, width, channels] image data
coords: [B0, ..., Bn-1, height, width, 2] (x,y) texture coordinates
weights: [B0, ... ,Bn-1, height, width] weights for each point. If omitted,
all points are weighed equally. Use this to implement, for example, soft
z-buffering.
Returns:
[B0, ..., Bn-1, ..., channels] image data, in which each point in the
input image has been moved to the position indicated by the corresponding
(x,y) texture coordinates. The image and coords parameters must have
matching batch dimensions B0, ..., Bn-1.
"""
# Forward-warp computed using the gradient of reverse-warp. We use a dummy
# image of the right size for reverse-warping. An extra channel is used to
# accumulate the total weight for each pixel which we'll then divide by.
image_and_ones = tf.concat([image, tf.ones_like(image[Ellipsis, -1:])], axis=-1)
dummy = tf.zeros_like(image_and_ones)
if weights is None:
weighted_image = image_and_ones
else:
weighted_image = image_and_ones * weights[Ellipsis, tf.newaxis]
with tf.GradientTape(watch_accessed_variables=False) as g:
g.watch(dummy)
reverse = tf.reduce_sum(
sample_image(dummy, coords) * weighted_image, [-3, -2])
grads = g.gradient(reverse, dummy)
rgb = grads[Ellipsis, :-1]
total = grads[Ellipsis, -1:]
result = tf.math.divide_no_nan(rgb, total)
return result
def flow_warp(image, flow):
"""Warp images by resampling according to flow vectors.
Args:
image: [..., H, W, C] images
flow: [..., H, W, 2] (x, y) texture offsets
Returns:
[..., H, W, C] resampled images. Each pixel in each output image has been
bilinearly sampled from the corresponding pixel in its input image plus
the (x, y) flow vector. The flow vectors are texture coordinate offsets,
e.g. (1, 1) is an offset of the whole width and height of the image.
Sampling outside the image yields zero values.
"""
width = image.shape.as_list()[-2]
height = image.shape.as_list()[-3]
grid = pixel_center_grid(height, width)
coords = grid + flow
return sample_image(image, coords)
def flow_forward_warp(image, flow):
"""Forward-warp images according to flow vectors.
Args:
image: [..., H, W, C] images
flow: [..., H, W, 2] (x, y) texture offsets
Returns:
[..., H, W, C] warped images. Each pixel in each image is offset according
to the corresponding value in the flow, and splatted onto a 2x2 pixel block.
(See bilinear_forward_warp for details.) If no points warp to a location,
the result will be zero. The flow vectors are texture coordinate offsets,
e.g. (1, 1) is an offset of the whole width and height of the image.
"""
width = image.shape.as_list()[-2]
height = image.shape.as_list()[-3]
grid = pixel_center_grid(height, width)
coords = grid + flow
return bilinear_forward_warp(image, coords)
| [
"copybara-worker@google.com"
] | copybara-worker@google.com |
cf4869a008091dac50e4e6d07bded0da84f85bb3 | 2bcf18252fa9144ece3e824834ac0e117ad0bdf3 | /zpt/trunk/site-packages/zpt/_pytz/zoneinfo/Asia/Ulan_Bator.py | 23ee14fe6b126706fac6097086cd541788e4110c | [
"MIT",
"ZPL-2.1"
] | permissive | chadwhitacre/public | 32f65ba8e35d38c69ed4d0edd333283a239c5e1d | 0c67fd7ec8bce1d8c56c7ff3506f31a99362b502 | refs/heads/master | 2021-05-10T14:32:03.016000 | 2010-05-13T18:24:20 | 2010-05-13T18:24:20 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,011 | py | '''tzinfo timezone information for Asia/Ulan_Bator.'''
from zpt._pytz.tzinfo import DstTzInfo
from zpt._pytz.tzinfo import memorized_datetime as d
from zpt._pytz.tzinfo import memorized_ttinfo as i
class Ulan_Bator(DstTzInfo):
    '''Asia/Ulan_Bator timezone definition. See datetime.tzinfo for details'''

    # Machine-generated pytz-style zoneinfo data: the two parallel tables
    # below are consumed by DstTzInfo; do not hand-edit individual entries.
    zone = 'Asia/Ulan_Bator'

    # UTC instants at which the zone's offset/DST state changes; entry k
    # switches the zone into the state described by _transition_info[k].
    _utc_transition_times = [
        d(1,1,1,0,0,0),
        d(1905,7,31,16,52,28),
        d(1977,12,31,17,0,0),
        d(1983,3,31,16,0,0),
        d(1983,9,30,15,0,0),
        d(1984,3,31,16,0,0),
        d(1984,9,29,18,0,0),
        d(1985,3,30,18,0,0),
        d(1985,9,28,18,0,0),
        d(1986,3,29,18,0,0),
        d(1986,9,27,18,0,0),
        d(1987,3,28,18,0,0),
        d(1987,9,26,18,0,0),
        d(1988,3,26,18,0,0),
        d(1988,9,24,18,0,0),
        d(1989,3,25,18,0,0),
        d(1989,9,23,18,0,0),
        d(1990,3,24,18,0,0),
        d(1990,9,29,18,0,0),
        d(1991,3,30,18,0,0),
        d(1991,9,28,18,0,0),
        d(1992,3,28,18,0,0),
        d(1992,9,26,18,0,0),
        d(1993,3,27,18,0,0),
        d(1993,9,25,18,0,0),
        d(1994,3,26,18,0,0),
        d(1994,9,24,18,0,0),
        d(1995,3,25,18,0,0),
        d(1995,9,23,18,0,0),
        d(1996,3,30,18,0,0),
        d(1996,9,28,18,0,0),
        d(1997,3,29,18,0,0),
        d(1997,9,27,18,0,0),
        d(1998,3,28,18,0,0),
        d(1998,9,26,18,0,0),
        d(2001,4,27,18,0,0),
        d(2001,9,28,17,0,0),
        d(2002,3,29,18,0,0),
        d(2002,9,27,17,0,0),
        d(2003,3,28,18,0,0),
        d(2003,9,26,17,0,0),
        d(2004,3,26,18,0,0),
        d(2004,9,24,17,0,0),
        d(2005,3,25,18,0,0),
        d(2005,9,23,17,0,0),
        d(2006,3,24,18,0,0),
        d(2006,9,29,17,0,0),
        d(2007,3,30,18,0,0),
        d(2007,9,28,17,0,0),
        d(2008,3,28,18,0,0),
        d(2008,9,26,17,0,0),
        d(2009,3,27,18,0,0),
        d(2009,9,25,17,0,0),
        d(2010,3,26,18,0,0),
        d(2010,9,24,17,0,0),
        d(2011,3,25,18,0,0),
        d(2011,9,23,17,0,0),
        d(2012,3,30,18,0,0),
        d(2012,9,28,17,0,0),
        d(2013,3,29,18,0,0),
        d(2013,9,27,17,0,0),
        d(2014,3,28,18,0,0),
        d(2014,9,26,17,0,0),
        d(2015,3,27,18,0,0),
        d(2015,9,25,17,0,0),
        d(2016,3,25,18,0,0),
        d(2016,9,23,17,0,0),
        d(2017,3,24,18,0,0),
        d(2017,9,29,17,0,0),
        d(2018,3,30,18,0,0),
        d(2018,9,28,17,0,0),
        d(2019,3,29,18,0,0),
        d(2019,9,27,17,0,0),
        d(2020,3,27,18,0,0),
        d(2020,9,25,17,0,0),
        d(2021,3,26,18,0,0),
        d(2021,9,24,17,0,0),
        d(2022,3,25,18,0,0),
        d(2022,9,23,17,0,0),
        d(2023,3,24,18,0,0),
        d(2023,9,29,17,0,0),
        d(2024,3,29,18,0,0),
        d(2024,9,27,17,0,0),
        d(2025,3,28,18,0,0),
        d(2025,9,26,17,0,0),
        d(2026,3,27,18,0,0),
        d(2026,9,25,17,0,0),
        d(2027,3,26,18,0,0),
        d(2027,9,24,17,0,0),
        d(2028,3,24,18,0,0),
        d(2028,9,29,17,0,0),
        d(2029,3,30,18,0,0),
        d(2029,9,28,17,0,0),
        d(2030,3,29,18,0,0),
        d(2030,9,27,17,0,0),
        d(2031,3,28,18,0,0),
        d(2031,9,26,17,0,0),
        d(2032,3,26,18,0,0),
        d(2032,9,24,17,0,0),
        d(2033,3,25,18,0,0),
        d(2033,9,23,17,0,0),
        d(2034,3,24,18,0,0),
        d(2034,9,29,17,0,0),
        d(2035,3,30,18,0,0),
        d(2035,9,28,17,0,0),
        d(2036,3,28,18,0,0),
        d(2036,9,26,17,0,0),
        d(2037,3,27,18,0,0),
        d(2037,9,25,17,0,0),
    ]

    # (utcoffset seconds, dst seconds, tzname) for each transition above:
    # LMT, then ULAT at UTC+7, then UTC+8 with ULAST (UTC+9) summer periods.
    _transition_info = [
        i(25680,0,'LMT'),
        i(25200,0,'ULAT'),
        i(28800,0,'ULAT'),
        i(32400,3600,'ULAST'),
        i(28800,0,'ULAT'),
        i(32400,3600,'ULAST'),
        i(28800,0,'ULAT'),
        i(32400,3600,'ULAST'),
        i(28800,0,'ULAT'),
        i(32400,3600,'ULAST'),
        i(28800,0,'ULAT'),
        i(32400,3600,'ULAST'),
        i(28800,0,'ULAT'),
        i(32400,3600,'ULAST'),
        i(28800,0,'ULAT'),
        i(32400,3600,'ULAST'),
        i(28800,0,'ULAT'),
        i(32400,3600,'ULAST'),
        i(28800,0,'ULAT'),
        i(32400,3600,'ULAST'),
        i(28800,0,'ULAT'),
        i(32400,3600,'ULAST'),
        i(28800,0,'ULAT'),
        i(32400,3600,'ULAST'),
        i(28800,0,'ULAT'),
        i(32400,3600,'ULAST'),
        i(28800,0,'ULAT'),
        i(32400,3600,'ULAST'),
        i(28800,0,'ULAT'),
        i(32400,3600,'ULAST'),
        i(28800,0,'ULAT'),
        i(32400,3600,'ULAST'),
        i(28800,0,'ULAT'),
        i(32400,3600,'ULAST'),
        i(28800,0,'ULAT'),
        i(32400,3600,'ULAST'),
        i(28800,0,'ULAT'),
        i(32400,3600,'ULAST'),
        i(28800,0,'ULAT'),
        i(32400,3600,'ULAST'),
        i(28800,0,'ULAT'),
        i(32400,3600,'ULAST'),
        i(28800,0,'ULAT'),
        i(32400,3600,'ULAST'),
        i(28800,0,'ULAT'),
        i(32400,3600,'ULAST'),
        i(28800,0,'ULAT'),
        i(32400,3600,'ULAST'),
        i(28800,0,'ULAT'),
        i(32400,3600,'ULAST'),
        i(28800,0,'ULAT'),
        i(32400,3600,'ULAST'),
        i(28800,0,'ULAT'),
        i(32400,3600,'ULAST'),
        i(28800,0,'ULAT'),
        i(32400,3600,'ULAST'),
        i(28800,0,'ULAT'),
        i(32400,3600,'ULAST'),
        i(28800,0,'ULAT'),
        i(32400,3600,'ULAST'),
        i(28800,0,'ULAT'),
        i(32400,3600,'ULAST'),
        i(28800,0,'ULAT'),
        i(32400,3600,'ULAST'),
        i(28800,0,'ULAT'),
        i(32400,3600,'ULAST'),
        i(28800,0,'ULAT'),
        i(32400,3600,'ULAST'),
        i(28800,0,'ULAT'),
        i(32400,3600,'ULAST'),
        i(28800,0,'ULAT'),
        i(32400,3600,'ULAST'),
        i(28800,0,'ULAT'),
        i(32400,3600,'ULAST'),
        i(28800,0,'ULAT'),
        i(32400,3600,'ULAST'),
        i(28800,0,'ULAT'),
        i(32400,3600,'ULAST'),
        i(28800,0,'ULAT'),
        i(32400,3600,'ULAST'),
        i(28800,0,'ULAT'),
        i(32400,3600,'ULAST'),
        i(28800,0,'ULAT'),
        i(32400,3600,'ULAST'),
        i(28800,0,'ULAT'),
        i(32400,3600,'ULAST'),
        i(28800,0,'ULAT'),
        i(32400,3600,'ULAST'),
        i(28800,0,'ULAT'),
        i(32400,3600,'ULAST'),
        i(28800,0,'ULAT'),
        i(32400,3600,'ULAST'),
        i(28800,0,'ULAT'),
        i(32400,3600,'ULAST'),
        i(28800,0,'ULAT'),
        i(32400,3600,'ULAST'),
        i(28800,0,'ULAT'),
        i(32400,3600,'ULAST'),
        i(28800,0,'ULAT'),
        i(32400,3600,'ULAST'),
        i(28800,0,'ULAT'),
        i(32400,3600,'ULAST'),
        i(28800,0,'ULAT'),
        i(32400,3600,'ULAST'),
        i(28800,0,'ULAT'),
        i(32400,3600,'ULAST'),
        i(28800,0,'ULAT'),
        i(32400,3600,'ULAST'),
        i(28800,0,'ULAT'),
    ]

# Replace the class with a module-level singleton instance, matching the
# pytz generated-module convention.
Ulan_Bator = Ulan_Bator()
| [
"chad@zetaweb.com"
] | chad@zetaweb.com |
85ef73de5c1fceffd5aff452e2b9902d1718602f | 5ca6730fa1178582d5f5875155f340ec0f406294 | /practice_problem-16.py | 44785ae4df282d5b7cc6f83173866d825eb41375 | [] | no_license | MahadiRahman262523/Python_Code_Part-1 | 9740d5ead27209d69af4497eea410f2faef50ff3 | e2f08e3d0564a003400743ae6050fd687c280639 | refs/heads/main | 2023-07-25T09:10:53.649000 | 2021-09-05T19:39:14 | 2021-09-05T19:39:14 | 403,396,706 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 135 | py | # Write a program to count the number of zeros in the following tuple:
# a = (7,0,8,0,0,9)
# Sample tuple for the exercise; ``a.count(0)`` (printed below) counts its
# three zero entries.
a = (7,0,8,0,0,9)
print(a.count(0)) | [
"noreply@github.com"
] | MahadiRahman262523.noreply@github.com |
1b406b2dc38004db14248af19fb7f7be9b8e7f6c | 487ce91881032c1de16e35ed8bc187d6034205f7 | /codes/BuildLinks1.10/test_input/CJ_16_1/16_1_1_FreeTShirt_a.py | 0207b362ff64f55d6e7a49f758c368374d2c5dc1 | [] | no_license | DaHuO/Supergraph | 9cd26d8c5a081803015d93cf5f2674009e92ef7e | c88059dc66297af577ad2b8afa4e0ac0ad622915 | refs/heads/master | 2021-06-14T16:07:52.405000 | 2016-08-21T13:39:13 | 2016-08-21T13:39:13 | 49,829,508 | 2 | 0 | null | 2021-03-19T21:55:46 | 2016-01-17T18:23:00 | Python | UTF-8 | Python | false | false | 404 | py | def argmax(s):
z = max(s)
return [(idx, c) for idx, c in enumerate(s) if c == z]
def last(s):
    # Return the lexicographically largest string reachable from ``s`` by,
    # at each level of recursion, moving one occurrence of the current
    # maximum character to the front, recursively rearranging only the
    # characters that preceded it, and keeping the suffix unchanged; the
    # outer max() picks the best occurrence to move.
    # NOTE(review): recursion depth/branching grows with repeated maximum
    # characters — presumably fine for the small Code Jam inputs this
    # script targets, but confirm before reusing on long strings.
    if len(s) <= 1:
        return s
    return max([s[idx]+last(s[:idx])+s[idx+1:] for idx, c in argmax(s)])
# Driver: read cases from 'A-small-i' (the first line — presumably the
# test-case count — is skipped) and write "Case #k: answer" lines to 'a-o'.
# Context managers close both files; the original leaked both handles and
# never flushed the output explicitly.
with open('A-small-i') as fin, open('a-o', 'w') as fw:
    for idx, line in enumerate(fin):
        if idx == 0:
            continue  # skip the header line
        s = line.strip()
        print(s)
        fw.write('Case #{0}: {1}\n'.format(idx, last(s)))
| [
"[dhuo@tcd.ie]"
] | [dhuo@tcd.ie] |
8732c9af3fea83ea57fa51e58d56b098749760f6 | 6561baa7ca68875e62fbf2d20c7887e4aadebe9f | /tests/cds_test_20_sf_ukmo.py | efa292077e335becd6970c33d7b3c44900ea5f35 | [
"Apache-2.0"
] | permissive | EXWEXs/cfgrib | 9057c9e5abbc38a32f113f832f1506988839ee82 | 8a1727af2c3bbcf2e17f250dfafcb4cc4e959354 | refs/heads/master | 2020-04-01T15:44:45.140000 | 2018-10-14T14:39:13 | 2018-10-14T14:39:13 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,089 | py |
import pytest
import cfgrib
import cdscommon
# Map of test id -> [CDS dataset name, CDS request parameters, expected
# number of keys in the first GRIB message].
TEST_FILES = {
    'seasonal-original-single-levels-ukmo': [
        'seasonal-original-single-levels',
        {
            'originating_centre': 'ukmo',
            'variable': 'maximum_2m_temperature_in_the_last_24_hours',
            'year': '2018',
            'month': ['04', '05'],
            'day': [
                '01', '02', '03', '04', '05', '06', '07', '08', '09', '10', '11', '12',
                '13', '14', '15', '16', '17', '18', '19', '20', '21', '22', '23', '24',
                '25', '26', '27', '28', '29', '30', '31'
            ],
            'leadtime_hour': ['24', '48'],
            'grid': ['3', '3'],
            'format': 'grib',
        },
        192,
    ],
    'seasonal-original-pressure-levels-ukmo': [
        'seasonal-original-pressure-levels',
        {
            'originating_centre': 'ukmo',
            'variable': 'temperature',
            'pressure_level': ['500', '850'],
            'year': '2018',
            'month': ['04', '05'],
            'day': [
                '01', '02', '03', '04', '05', '06', '07', '08', '09', '10', '11', '12',
                '13', '14', '15', '16', '17', '18', '19', '20', '21', '22', '23', '24',
                '25', '26', '27', '28', '29', '30', '31'
            ],
            'leadtime_hour': ['24', '48'],
            'grid': ['3', '3'],
            'format': 'grib',
        },
        192,
    ],
    'seasonal-postprocessed-single-levels-ukmo': [
        'seasonal-postprocessed-single-levels',
        {
            'originating_centre': 'ukmo',
            'variable': 'maximum_2m_temperature_in_the_last_24_hours_anomaly',
            'product_type': 'monthly_mean',
            'year': '2018',
            'month': ['04', '05'],
            'leadtime_month': ['1', '2'],
            'grid': ['3', '3'],
            'format': 'grib',
        },
        210,
    ],
}

# The three monthly-statistics entries differed only in ``product_type``;
# generate them instead of repeating three near-identical literals, so the
# shared request parameters cannot drift apart.
for _product_type in ('monthly_mean', 'ensemble_mean', 'hindcast_climate_mean'):
    TEST_FILES['seasonal-monthly-single-levels-%s-ukmo' % _product_type] = [
        'seasonal-monthly-single-levels',
        {
            'originating_centre': 'ukmo',
            'variable': 'maximum_2m_temperature_in_the_last_24_hours',
            'product_type': _product_type,
            'year': '2018',
            'month': ['04', '05'],
            'leadtime_month': ['1', '2'],
            'grid': ['3', '3'],
            'format': 'grib',
        },
        210,
    ]
@pytest.mark.parametrize('test_file', TEST_FILES.keys())
def test_reanalysis_Stream(test_file):
    """Download one dataset and stream it: the first GRIB message must have
    the expected number of keys, and the stream must contain as many
    messages as the first message's 'count' key announces."""
    dataset_name, request_params, expected_keys = TEST_FILES[test_file]
    grib_path = cdscommon.ensure_data(dataset_name, request_params, name='cds-' + test_file + '-{uuid}.grib')

    messages = cfgrib.FileStream(grib_path)
    first_message = messages.first()
    assert len(first_message) == expected_keys
    assert sum(1 for _ in messages) == first_message['count']
@pytest.mark.parametrize('test_file', TEST_FILES.keys())
def test_reanalysis_Dataset(test_file):
    """Download one dataset, open it with the 'cds' flavour and round-trip
    it to NetCDF next to the GRIB file."""
    dataset_name, request_params, _ = TEST_FILES[test_file]
    grib_path = cdscommon.ensure_data(dataset_name, request_params, name='cds-' + test_file + '-{uuid}.grib')

    res = cfgrib.xarray_store.open_dataset(grib_path, flavour_name='cds')
    res.to_netcdf(grib_path[:-5] + '.nc')
| [
"a.amici@bopen.eu"
] | a.amici@bopen.eu |
5e0bde2a16193651c22bf50efd429a326bf6f474 | 6b564e24a99b2d2c6a384d8674974f10ef9461d5 | /iptv_proxy/providers/crystalclear/data_model.py | 53c6ad0d72865ecf54ed3413a6d9df1d667e4c12 | [
"MIT"
] | permissive | Onemars/IPTVProxy | 1c1421c6962c1f7cf4cef90d8a2c98e98f5ded25 | 06d5472f49ecaa7eafb90832a1c9ac85a09cd268 | refs/heads/master | 2020-05-24T14:34:48.486000 | 2019-05-17T14:17:21 | 2019-05-17T14:17:21 | 187,311,948 | 1 | 0 | null | 2019-05-18T03:58:48 | 2019-05-18T03:58:47 | null | UTF-8 | Python | false | false | 6,858 | py | import logging
from sqlalchemy import Column
from sqlalchemy import Index
from sqlalchemy import Integer
from sqlalchemy import LargeBinary
from sqlalchemy import String
from sqlalchemy.ext.hybrid import hybrid_property
from iptv_proxy.data_model import DateTimeUTC
from iptv_proxy.providers.crystalclear.constants import CrystalClearConstants
from iptv_proxy.providers.crystalclear.db import Base
logger = logging.getLogger(__name__)
class CrystalClearChannel(Base):
    """Declarative ORM model for one Crystal Clear channel row.

    Column attributes are underscore-prefixed; callers use the hybrid
    properties below, which simply proxy the private columns.
    """
    _provider_name = CrystalClearConstants.PROVIDER_NAME.lower()

    __tablename__ = 'channel'

    _id = Column('id', String, primary_key=True, autoincrement=False)
    _m3u8_group = Column('m3u8_group', String, nullable=False)
    _number = Column('number', Integer, nullable=False)
    _name = Column('name', String, nullable=False)
    # ``pickle`` stores a serialized channel object; the two XMLTV columns
    # hold pre-rendered XMLTV text in complete and minimal variants.
    _pickle = Column('pickle', LargeBinary, nullable=False)
    _complete_xmltv = Column('complete_xmltv', String, nullable=False)
    _minimal_xmltv = Column('minimal_xmltv', String, nullable=False)

    # Index names are prefixed with the provider name — presumably so
    # several providers' schemas can coexist in one database (confirm).
    __table_args__ = (Index('{0}_channel_ix_id'.format(_provider_name), _id.asc()),
                      Index('{0}_channel_ix_m3u8_group'.format(_provider_name), _m3u8_group.asc()),
                      Index('{0}_channel_ix_m3u8_group_&_number'.format(_provider_name),
                            _m3u8_group.asc(),
                            _number.asc()),
                      Index('{0}_channel_ix_number'.format(_provider_name), _number.asc()))

    def __init__(self, id_, m3u8_group, number, name, pickle, complete_xmltv, minimal_xmltv):
        self._id = id_
        self._m3u8_group = m3u8_group
        self._number = number
        self._name = name
        self._pickle = pickle
        self._complete_xmltv = complete_xmltv
        self._minimal_xmltv = minimal_xmltv

    # Plain pass-through accessors for every column.
    @hybrid_property
    def complete_xmltv(self):
        return self._complete_xmltv

    @complete_xmltv.setter
    def complete_xmltv(self, complete_xmltv):
        self._complete_xmltv = complete_xmltv

    @hybrid_property
    def id(self):
        return self._id

    @id.setter
    def id(self, id_):
        self._id = id_

    @hybrid_property
    def m3u8_group(self):
        return self._m3u8_group

    @m3u8_group.setter
    def m3u8_group(self, m3u8_group):
        self._m3u8_group = m3u8_group

    @hybrid_property
    def minimal_xmltv(self):
        return self._minimal_xmltv

    @minimal_xmltv.setter
    def minimal_xmltv(self, minimal_xmltv):
        self._minimal_xmltv = minimal_xmltv

    @hybrid_property
    def name(self):
        return self._name

    @name.setter
    def name(self, name):
        self._name = name

    @hybrid_property
    def number(self):
        return self._number

    @number.setter
    def number(self, number):
        self._number = number

    @hybrid_property
    def pickle(self):
        return self._pickle

    @pickle.setter
    def pickle(self, pickle):
        self._pickle = pickle
class CrystalClearProgram(Base):
    """Declarative ORM model for one EPG program entry.

    A program belongs to a channel (referenced by XMLTV id and by number)
    and spans the UTC interval [start, stop). Hybrid properties proxy the
    underscore-prefixed columns.
    """
    _provider_name = CrystalClearConstants.PROVIDER_NAME.lower()

    __tablename__ = 'program'

    _id = Column('id', String, primary_key=True, autoincrement=False)
    _start = Column('start', DateTimeUTC(timezone=True), nullable=False)
    _stop = Column('stop', DateTimeUTC(timezone=True), nullable=False)
    _channel_xmltv_id = Column('channel_xmltv_id', String, nullable=False)
    _channel_number = Column('channel_number', Integer, nullable=False)
    _pickle = Column('pickle', LargeBinary, nullable=False)
    _complete_xmltv = Column('complete_xmltv', String, nullable=False)
    _minimal_xmltv = Column('minimal_xmltv', String, nullable=False)

    # Provider-prefixed indexes covering the time-window lookups used for
    # EPG queries (by channel number/xmltv id and start/stop).
    __table_args__ = (
        Index('{0}_program_ix_id'.format(_provider_name), _id.asc()),
        Index('{0}_program_ix_channel_number_&_start'.format(_provider_name), _channel_number.asc(), _start.asc()),
        Index('{0}_program_ix_channel_xmltv_id_&_start'.format(_provider_name), _channel_xmltv_id.asc(), _start.asc()),
        Index('{0}_program_ix_channel_xmltv_id_&_start_&_stop'.format(_provider_name),
              _channel_xmltv_id.asc(),
              _start.asc(),
              _stop.asc()),
        Index('{0}_program_ix_start'.format(_provider_name), _start.asc()))

    def __init__(self,
                 id_,
                 start,
                 stop,
                 channel_xmltv_id,
                 channel_number,
                 pickle,
                 complete_xmltv,
                 minimal_xmltv):
        self._id = id_
        self._start = start
        self._stop = stop
        self._channel_xmltv_id = channel_xmltv_id
        self._channel_number = channel_number
        self._pickle = pickle
        self._complete_xmltv = complete_xmltv
        self._minimal_xmltv = minimal_xmltv

    # Plain pass-through accessors for every column.
    @hybrid_property
    def channel_number(self):
        return self._channel_number

    @channel_number.setter
    def channel_number(self, channel_number):
        self._channel_number = channel_number

    @hybrid_property
    def channel_xmltv_id(self):
        return self._channel_xmltv_id

    @channel_xmltv_id.setter
    def channel_xmltv_id(self, channel_xmltv_id):
        self._channel_xmltv_id = channel_xmltv_id

    @hybrid_property
    def complete_xmltv(self):
        return self._complete_xmltv

    @complete_xmltv.setter
    def complete_xmltv(self, complete_xmltv):
        self._complete_xmltv = complete_xmltv

    @hybrid_property
    def id(self):
        return self._id

    @id.setter
    def id(self, id_):
        self._id = id_

    @hybrid_property
    def minimal_xmltv(self):
        return self._minimal_xmltv

    @minimal_xmltv.setter
    def minimal_xmltv(self, minimal_xmltv):
        self._minimal_xmltv = minimal_xmltv

    @hybrid_property
    def pickle(self):
        return self._pickle

    @pickle.setter
    def pickle(self, pickle):
        self._pickle = pickle

    @hybrid_property
    def start(self):
        return self._start

    @start.setter
    def start(self, start):
        self._start = start

    @hybrid_property
    def stop(self):
        return self._stop

    @stop.setter
    def stop(self, stop):
        self._stop = stop
class CrystalClearSetting(Base):
    """Declarative ORM model for one provider setting (name/value pair)."""
    _provider_name = CrystalClearConstants.PROVIDER_NAME.lower()

    __tablename__ = 'setting'

    _name = Column('name', String, primary_key=True)
    _value = Column('value', String, nullable=False)

    # NOTE(review): unlike the channel/program tables, this index name is
    # NOT prefixed with ``_provider_name`` — confirm whether that is
    # intentional before renaming, since changing it alters the schema.
    __table_args__ = (Index('setting_ix_name', _name.asc()),)

    def __init__(self, name, value):
        self._name = name
        self._value = value

    @hybrid_property
    def name(self):
        return self._name

    @name.setter
    def name(self, name):
        self._name = name

    @hybrid_property
    def value(self):
        return self._value

    @value.setter
    def value(self, value):
        self._value = value
| [
"sherif.fanous@gmail.com"
] | sherif.fanous@gmail.com |
727c6dd5a9d6d63154d4df935778852dc73c00fa | c590571d129ead00bd1916025f854a1719d75683 | /zvt/recorders/joinquant/meta/china_stock_meta_recorder.py | fa4a0c4364dd713ab0f74d8b7829a1b6f86f10ac | [
"MIT"
] | permissive | ming123jew/zvt | f2fb8e157951e9440a6decd5ae0c08ea227a39db | de66a48ad2a3ac2c3fb22b9ea17a85f28e95cc62 | refs/heads/master | 2023-05-28T15:00:52.015000 | 2021-06-13T12:56:18 | 2021-06-13T12:56:18 | 570,070,597 | 1 | 0 | MIT | 2022-11-24T09:16:48 | 2022-11-24T09:16:47 | null | UTF-8 | Python | false | false | 5,733 | py | # -*- coding: utf-8 -*-
import pandas as pd
from jqdatapy.api import get_all_securities, run_query
from zvt.api.quote import china_stock_code_to_id, portfolio_relate_stock
from zvt.contract.api import df_to_db, get_entity_exchange, get_entity_code
from zvt.contract.recorder import Recorder, TimeSeriesDataRecorder
from zvt.domain import EtfStock, Stock, Etf, StockDetail
from zvt.recorders.joinquant.common import to_entity_id, jq_to_report_period
from zvt.utils.pd_utils import pd_is_not_null
from zvt.utils.time_utils import to_time_str
class BaseJqChinaMetaRecorder(Recorder):
    """Shared base for JoinQuant meta recorders: normalizes a raw JoinQuant
    securities DataFrame into the zvt entity schema."""
    provider = 'joinquant'

    def __init__(self, batch_size=10, force_update=True, sleeping_time=10) -> None:
        super().__init__(batch_size, force_update, sleeping_time)

    def to_zvt_entity(self, df, entity_type, category=None):
        # Move the JoinQuant security code into an 'entity_id' column
        # (set_index + rename the index + reset_index == rename 'code').
        df = df.set_index('code')
        df.index.name = 'entity_id'
        df = df.reset_index()
        # listing date ("上市日期")
        df.rename(columns={'start_date': 'timestamp'}, inplace=True)
        df['timestamp'] = pd.to_datetime(df['timestamp'])
        df['list_date'] = df['timestamp']
        df['end_date'] = pd.to_datetime(df['end_date'])

        # Translate the JoinQuant code into a zvt entity id, then derive
        # the exchange and bare code back from that id.
        df['entity_id'] = df['entity_id'].apply(lambda x: to_entity_id(entity_type=entity_type, jq_code=x))
        df['id'] = df['entity_id']
        df['entity_type'] = entity_type
        df['exchange'] = df['entity_id'].apply(lambda x: get_entity_exchange(x))
        df['code'] = df['entity_id'].apply(lambda x: get_entity_code(x))
        df['name'] = df['display_name']

        if category:
            df['category'] = category

        return df
class JqChinaStockRecorder(BaseJqChinaMetaRecorder):
    data_schema = Stock

    def run(self):
        """Fetch the stock list from JoinQuant and persist it to both the
        Stock and StockDetail schemas."""
        stocks = self.to_zvt_entity(get_all_securities(code='stock'), entity_type='stock')
        for schema in (Stock, StockDetail):
            df_to_db(stocks, data_schema=schema, provider=self.provider, force_update=self.force_update)
        self.logger.info("persist stock list success")
class JqChinaEtfRecorder(BaseJqChinaMetaRecorder):
    data_schema = Etf

    def run(self):
        """Fetch the ETF list from JoinQuant and persist it."""
        etf_df = self.to_zvt_entity(
            get_all_securities(code='etf'), entity_type='etf', category='etf')
        df_to_db(etf_df, data_schema=Etf, provider=self.provider,
                 force_update=self.force_update)
        self.logger.info("persist etf list success")
class JqChinaStockEtfPortfolioRecorder(TimeSeriesDataRecorder):
    """Records the stock holdings of each ETF (EtfStock rows) from the
    JoinQuant FUND_PORTFOLIO_STOCK table."""
    entity_provider = 'joinquant'
    entity_schema = Etf

    # data comes from JoinQuant ("数据来自jq")
    provider = 'joinquant'

    data_schema = EtfStock

    # NOTE(review): mutable default ``exchanges=['sh', 'sz']`` is shared
    # across calls; it appears to be read-only here, but confirm.
    def __init__(self, entity_type='etf', exchanges=['sh', 'sz'], entity_ids=None, codes=None, day_data=True, batch_size=10,
                 force_update=False, sleeping_time=5, default_size=2000, real_time=False, fix_duplicate_way='add',
                 start_timestamp=None, end_timestamp=None, close_hour=0, close_minute=0) -> None:
        super().__init__(entity_type, exchanges, entity_ids, codes, day_data, batch_size, force_update, sleeping_time,
                         default_size, real_time, fix_duplicate_way, start_timestamp, end_timestamp, close_hour,
                         close_minute)

    def record(self, entity, start, end, size, timestamps):
        # Pull all portfolio rows for this fund published on/after `start`.
        df = run_query(table='finance.FUND_PORTFOLIO_STOCK',
                       conditions=f'pub_date#>=#{to_time_str(start)}&code#=#{entity.code}',
                       parse_dates=None)
        if pd_is_not_null(df):
            # run_query returns one row per holding with columns such as:
            # id, code, period_start, period_end, pub_date, report_type_id,
            # report_type, rank, symbol, name, shares, market_cap, proportion
            df['timestamp'] = pd.to_datetime(df['pub_date'])

            df.rename(columns={'symbol': 'stock_code', 'name': 'stock_name'}, inplace=True)
            # percentage -> fraction
            df['proportion'] = df['proportion'] * 0.01

            df = portfolio_relate_stock(df, entity)

            df['stock_id'] = df['stock_code'].apply(lambda x: china_stock_code_to_id(x))
            # Composite primary key: entity, stock, publication date, row id.
            df['id'] = df[['entity_id', 'stock_id', 'pub_date', 'id']].apply(lambda x: '_'.join(x.astype(str)), axis=1)

            df['report_date'] = pd.to_datetime(df['period_end'])
            df['report_period'] = df['report_type'].apply(lambda x: jq_to_report_period(x))

            df_to_db(df=df, data_schema=self.data_schema, provider=self.provider, force_update=self.force_update)

            # self.logger.info(df.tail())

            self.logger.info(f"persist etf {entity.code} portfolio success {df.iloc[-1]['pub_date']}")

        return None
if __name__ == '__main__':
    # Ad-hoc manual run: record the portfolio of one ETF (510050).
    # JqChinaEtfRecorder().run()
    JqChinaStockEtfPortfolioRecorder(codes=['510050']).run()
# the __all__ is generated
__all__ = ['BaseJqChinaMetaRecorder', 'JqChinaStockRecorder', 'JqChinaEtfRecorder', 'JqChinaStockEtfPortfolioRecorder'] | [
"5533061@qq.com"
] | 5533061@qq.com |
da4e65994020ecec1aae6923a1bd83b3951032e3 | a90ba084b85683f4c52d0e638cfb6108207ced38 | /896.py | 91ca187efe65342ba1e072994842f422f065f605 | [] | no_license | JiayuZhai/leetcode_python3 | 4a9260d00a52cde9ec37e6292e64d04161e66111 | 5755c3edd6d949af18d0247d2103379510dfab85 | refs/heads/master | 2020-04-02T21:22:42.270000 | 2019-03-29T23:28:48 | 2019-03-29T23:28:48 | 154,796,956 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 162 | py | class Solution:
def isMonotonic(self, A: List[int]) -> bool:
sortA = sorted(A)
return (A == sortA or list(reversed(A)) == sortA)
| [
"zhaijiayu2013@gmail.com"
] | zhaijiayu2013@gmail.com |
90a5ad57cf62d7082f693f949d412f2d773b647a | 844c7f8fb8d6bfab912583c71b93695167c59764 | /fixação/Seção06/51-60/Sec06Ex51v2.py | 35580169e28f8bc9bc58b28718531dd96aa9d948 | [
"Apache-2.0"
] | permissive | gugajung/guppe | 2be10656cd9aa33be6afb8e86f20df82662bcc59 | a0ee7b85e8687e8fb8243fbb509119a94bc6460f | refs/heads/main | 2023-05-28T08:08:24.963000 | 2021-06-07T16:56:11 | 2021-06-07T16:56:11 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 624 | py | from datetime import date
# Salary projection: starting in 1995 at 2000, apply a raise every year up
# to the current year; the raise percentage doubles after each year.
anoAtual = 1995
salarioAtual = 2000
percentAumento = 1.5

dataAtual = date.today()
anoReal = dataAtual.year

while anoAtual <= anoReal:
    aumento = (salarioAtual * percentAumento) / 100
    salarioAtual += aumento
    print("----------------------------------------")
    print(" --- debug")
    print(f" --- > Ano Atual : {anoAtual}")
    print(f" --- > Salario Atual : {salarioAtual:.2f}")
    print(f" --- > Percente de Aumento : {percentAumento:.4f}")
    anoAtual += 1
    percentAumento *= 2

print("=================================================")
print("Final de O programas") | [
"luiz.carlin@gmail.com"
] | luiz.carlin@gmail.com |
d3e7e9dae606fe6dc77d9c43997e9c592fbcd477 | 982bc95ab762829c8b6913e44504415cdd77241a | /account_easy_reconcile/base_reconciliation.py | b50c06b9eed699d96da272f0fb9dd9613177c235 | [] | no_license | smart-solution/natuurpunt-finance | 6b9eb65be96a4e3261ce46d7f0c31de3589e1e0d | 6eeb48468792e09d46d61b89499467a44d67bc79 | refs/heads/master | 2021-01-23T14:42:05.017000 | 2020-11-03T15:56:35 | 2020-11-03T15:56:35 | 39,186,046 | 0 | 1 | null | 2020-11-03T15:56:37 | 2015-07-16T08:36:54 | Python | UTF-8 | Python | false | false | 7,776 | py | # -*- coding: utf-8 -*-
##############################################################################
#
# Copyright 2012-2013 Camptocamp SA (Guewen Baconnier)
# Copyright (C) 2010 Sébastien Beau
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import fields, orm
from operator import itemgetter, attrgetter
class easy_reconcile_base(orm.AbstractModel):
    """Abstract Model for reconciliation methods.

    Legacy OpenERP (Python 2) API: note the ``cr, uid`` cursor/user
    arguments, ``long`` and ``dict.iteritems`` below.
    """
    _name = 'easy.reconcile.base'
    _inherit = 'easy.reconcile.options'

    _columns = {
        'account_id': fields.many2one(
            'account.account', 'Account', required=True),
        'partner_ids': fields.many2many(
            'res.partner', string="Restrict on partners"),
        # other columns are inherited from easy.reconcile.options
    }

    def automatic_reconcile(self, cr, uid, ids, context=None):
        """Reconciliation method called from the view.

        :return: list of reconciled ids, list of partially reconciled items
        """
        if isinstance(ids, (int, long)):
            ids = [ids]
        assert len(ids) == 1, "Has to be called on one id"
        rec = self.browse(cr, uid, ids[0], context=context)
        return self._action_rec(cr, uid, rec, context=context)

    def _action_rec(self, cr, uid, rec, context=None):
        """Must be inherited to implement the reconciliation.

        :return: list of reconciled ids
        """
        raise NotImplementedError

    def _base_columns(self, rec):
        """Mandatory columns for move lines queries.

        An extra column aliased as ``key`` should be defined
        in each query.
        """
        aml_cols = (
            'id',
            'debit',
            'credit',
            'date',
            'period_id',
            'ref',
            'name',
            'partner_id',
            'account_id',
            'move_id')
        return ["account_move_line.%s" % col for col in aml_cols]

    def _select(self, rec, *args, **kwargs):
        """SELECT clause listing the mandatory move-line columns."""
        return "SELECT %s" % ', '.join(self._base_columns(rec))

    def _from(self, rec, *args, **kwargs):
        """FROM clause of the move-lines query."""
        return "FROM account_move_line"

    def _where(self, rec, *args, **kwargs):
        """WHERE clause restricting to the configured account (and
        optionally partners) and to unreconciled lines.

        :return: (where_sql, params) tuple
        """
        where = ("WHERE account_move_line.account_id = %s "
                 "AND account_move_line.reconcile_id IS NULL ")
        # it would be great to use dict for params
        # but as we use _where_calc in _get_filter
        # which returns a list, we have to
        # accomodate with that
        params = [rec.account_id.id]

        if rec.partner_ids:
            where += " AND account_move_line.partner_id IN %s"
            params.append(tuple([l.id for l in rec.partner_ids]))
        return where, params

    def _get_filter(self, cr, uid, rec, context):
        """Translate the record's stored domain filter into extra SQL.

        NOTE(review): ``eval`` on the stored ``rec.filter`` executes
        arbitrary expressions; acceptable only because the filter is
        admin-configured — confirm it is never user-supplied.
        """
        ml_obj = self.pool.get('account.move.line')
        where = ''
        params = []
        if rec.filter:
            dummy, where, params = ml_obj._where_calc(
                cr, uid, eval(rec.filter), context=context).get_sql()
            if where:
                where = " AND %s" % where
        return where, params

    def _below_writeoff_limit(self, cr, uid, rec, lines,
                              writeoff_limit, context=None):
        """Sum debit/credit over ``lines`` and compare the rounded balance
        with the allowed write-off.

        :return: (below_limit, total_debit, total_credit)
        """
        precision = self.pool.get('decimal.precision').precision_get(
            cr, uid, 'Account')
        keys = ('debit', 'credit')
        # Fold the move lines into a single {'debit': .., 'credit': ..}
        # dict (Python 2 ``iteritems``).
        sums = reduce(
            lambda line, memo:
                dict((key, value + memo[key])
                     for key, value
                     in line.iteritems()
                     if key in keys), lines)

        debit, credit = sums['debit'], sums['credit']
        writeoff_amount = round(debit - credit, precision)
        return bool(writeoff_limit >= abs(writeoff_amount)), debit, credit

    def _get_rec_date(self, cr, uid, rec, lines,
                      based_on='end_period_last_credit', context=None):
        """Choose the reconciliation date according to ``based_on``
        (period end or newest line date, optionally restricted to the
        credit or debit lines); ``None`` means "today".
        """
        period_obj = self.pool.get('account.period')

        def last_period(mlines):
            period_ids = [ml['period_id'] for ml in mlines]
            periods = period_obj.browse(
                cr, uid, period_ids, context=context)
            return max(periods, key=attrgetter('date_stop'))

        def last_date(mlines):
            return max(mlines, key=itemgetter('date'))

        def credit(mlines):
            return [l for l in mlines if l['credit'] > 0]

        def debit(mlines):
            return [l for l in mlines if l['debit'] > 0]

        if based_on == 'end_period_last_credit':
            return last_period(credit(lines)).date_stop
        if based_on == 'end_period':
            return last_period(lines).date_stop
        elif based_on == 'newest':
            return last_date(lines)['date']
        elif based_on == 'newest_credit':
            return last_date(credit(lines))['date']
        elif based_on == 'newest_debit':
            return last_date(debit(lines))['date']
        # reconcilation date will be today
        # when date is None
        return None

    def _reconcile_lines(self, cr, uid, rec, lines, allow_partial=False, context=None):
        """Try to reconcile given lines.

        :param list lines: list of dict of move lines, they must at least
                           contain values for : id, debit, credit
        :param boolean allow_partial: if True, partial reconciliation will be
                                      created, otherwise only Full
                                      reconciliation will be created
        :return: tuple of boolean values, first item is whether the items
                 have been reconciled or not,
                 the second is whether the reconciliation is full (True)
                 or partial (False)
        """
        if context is None:
            context = {}
        ml_obj = self.pool.get('account.move.line')
        writeoff = rec.write_off

        line_ids = [l['id'] for l in lines]
        below_writeoff, sum_debit, sum_credit = self._below_writeoff_limit(
            cr, uid, rec, lines, writeoff, context=context)
        date = self._get_rec_date(
            cr, uid, rec, lines, rec.date_base_on, context=context)

        rec_ctx = dict(context, date_p=date)
        if below_writeoff:
            # Within the write-off limit: full reconciliation, booking the
            # residual to the profit or loss account depending on its sign.
            if sum_credit < sum_debit:
                writeoff_account_id = rec.account_profit_id.id
            else:
                writeoff_account_id = rec.account_lost_id.id
            period_id = self.pool.get('account.period').find(
                cr, uid, dt=date, context=context)[0]
            ml_obj.reconcile(
                cr, uid,
                line_ids,
                type='auto',
                writeoff_acc_id=writeoff_account_id,
                writeoff_period_id=period_id,
                writeoff_journal_id=rec.journal_id.id,
                context=rec_ctx)
            return True, True
        elif allow_partial:
            ml_obj.reconcile_partial(
                cr, uid,
                line_ids,
                type='manual',
                context=rec_ctx)
            return True, False

        return False, False
| [
"fabian.semal@smartsolution.be"
] | fabian.semal@smartsolution.be |
ae535fe72253b6c574f7196c75a3b64e003c3ea3 | ccb6918eff9624bc890c4318462b3d04fe01ab25 | /d02/for/for/settings.py | 763917cea83d3de15fae9c387027213bdac3fd6e | [] | no_license | shchliu/19django | 431202f3b4a71fb2614f3f113174df327a338413 | 63af6aeff279a83fb170c1b5385d0804d96fafad | refs/heads/master | 2020-08-15T08:53:36.707000 | 2019-10-16T08:26:41 | 2019-10-16T08:28:32 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,121 | py | """
Django settings for for project.
Generated by 'django-admin startproject' using Django 2.0.
For more information on this file, see
https://docs.djangoproject.com/en/2.0/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/2.0/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/2.0/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'n$s!ww49p_&vb4(^$4-n#s(98qsu+(61j_2w2)&7pbx+3(k_x+'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'for.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [os.path.join(BASE_DIR, 'templates')]
,
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'for.wsgi.application'
# Database
# https://docs.djangoproject.com/en/2.0/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/2.0/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/2.0/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/2.0/howto/static-files/
STATIC_URL = '/static/'
| [
"1124874942@qq.com"
] | 1124874942@qq.com |
b6e187de710d37037dd7c0d830a50e7eaee1aa28 | 786027545626c24486753351d6e19093b261cd7d | /ghidra9.2.1_pyi/ghidra/app/util/bin/format/xcoff/XCoffSectionHeaderFlags.pyi | 43a745532a3157885655ec9c25a175e6ac3df2ec | [
"MIT"
] | permissive | kohnakagawa/ghidra_scripts | 51cede1874ef2b1fed901b802316449b4bf25661 | 5afed1234a7266c0624ec445133280993077c376 | refs/heads/main | 2023-03-25T08:25:16.842000 | 2021-03-18T13:31:40 | 2021-03-18T13:31:40 | 338,577,905 | 14 | 1 | null | null | null | null | UTF-8 | Python | false | false | 772 | pyi | import java.lang
class XCoffSectionHeaderFlags(object):
STYP_BSS: int = 128
STYP_DATA: int = 64
STYP_DEBUG: int = 8192
STYP_EXCEPT: int = 128
STYP_INFO: int = 512
STYP_LOADER: int = 4096
STYP_OVRFLO: int = 32768
STYP_PAD: int = 8
STYP_TEXT: int = 32
STYP_TYPCHK: int = 16384
def __init__(self): ...
def equals(self, __a0: object) -> bool: ...
def getClass(self) -> java.lang.Class: ...
def hashCode(self) -> int: ...
def notify(self) -> None: ...
def notifyAll(self) -> None: ...
def toString(self) -> unicode: ...
@overload
def wait(self) -> None: ...
@overload
def wait(self, __a0: long) -> None: ...
@overload
def wait(self, __a0: long, __a1: int) -> None: ...
| [
"tsunekou1019@gmail.com"
] | tsunekou1019@gmail.com |
ce42ed7f15ab68df41c64c17c877f642173d66a2 | a7cca49626a3d7100e9ac5c2f343c351ecb76ac7 | /upydev/argcompleter.py | 3751c0a54657cd37a9a63de43d2f4f77ad8882e7 | [
"MIT"
] | permissive | Carglglz/upydev | 104455d77d64300074bda54d86bd791f19184975 | 529aa29f3e1acf8160383fe410b5659110dc96de | refs/heads/master | 2023-05-24T18:38:56.242000 | 2022-10-21T14:03:17 | 2022-10-21T14:03:17 | 199,335,165 | 49 | 9 | MIT | 2022-10-21T14:03:18 | 2019-07-28T20:42:00 | Python | UTF-8 | Python | false | false | 57,893 | py | import os
from upydev import __path__
UPYDEV_PATH = __path__[0]
# SHELL_CMD_PARSER
shell_commands = ['cd', 'mkdir', 'cat', 'head', 'rm', 'rmdir', 'pwd',
'run', 'mv']
custom_sh_cmd_kw = ['df', 'datetime', 'ifconfig', 'net',
'ap', 'mem', 'install', 'touch',
'exit', 'lpwd', 'lsl', 'lcd', 'put', 'get', 'ls',
'set', 'tree', 'dsync', 'reload', 'docs',
'du', 'ldu', 'upip', 'uping',
'timeit', 'i2c',
'upy-config', 'jupyterc', 'pytest', 'rssi',
'info', 'id', 'uhelp', 'modules', 'shasum', 'vim',
'update_upyutils', 'mdocs', 'ctime', 'enable_sh',
'diff', 'config', 'fw', 'mpyx', 'sd', 'uptime', 'cycles', 'play']
LS = dict(help="list files or directories",
subcmd=dict(help='indicate a file/dir or pattern to see', default=[],
metavar='file/dir/pattern', nargs='*'),
options={"-a": dict(help='list hidden files', required=False,
default=False,
action='store_true'),
"-d": dict(help='depth level', required=False,
default=0,
type=int)})
HEAD = dict(help="display first lines of a file",
subcmd=dict(help='indicate a file or pattern to see', default=[],
metavar='file/pattern', nargs='*'),
options={"-n": dict(help='number of lines to print', required=False,
default=10,
type=int)})
CAT = dict(help="concatenate and print files",
subcmd=dict(help='indicate a file or pattern to see', default=[],
metavar='file/pattern', nargs='*'),
options={"-d": dict(help='depth level', required=False,
default=0,
type=int)})
MKDIR = dict(help="make directories",
subcmd=dict(help='indicate a dir/pattern to create', default=[],
metavar='dir', nargs='*'),
options={})
CD = dict(help="change current working directory",
subcmd=dict(help='indicate a dir to change to', default='/',
metavar='dir', nargs='?'),
options={})
MV = dict(help="move/rename a file",
subcmd=dict(help='indicate a file to rename', default=[],
metavar='file', nargs=2),
options={})
PWD = dict(help="print current working directory",
subcmd={},
options={})
RM = dict(help="remove file or pattern of files",
subcmd=dict(help='indicate a file/pattern to remove', default=[],
metavar='file/dir/pattern', nargs='+'),
options={"-rf": dict(help='remove recursive force a dir or file',
required=False,
default=False,
action='store_true'),
"-d": dict(help='depth level search', required=False,
default=0,
type=int),
"-dd": dict(help='filter for directories only', required=False,
default=False,
action='store_true')})
RMDIR = dict(help="remove directories or pattern of directories",
subcmd=dict(help='indicate a dir/pattern to remove', default=[],
metavar='dir', nargs='+'),
options={"-d": dict(help='depth level search', required=False,
default=0,
type=int)})
DU = dict(help="display disk usage statistics",
subcmd=dict(help='indicate a dir to see usage', default='',
metavar='dir', nargs='?'),
options={"-d": dict(help='depth level', required=False,
default=0,
type=int),
"-p": dict(help='pattern to match', required=False,
default=[],
nargs='*')})
TREE = dict(help="list contents of directories in a tree-like format",
subcmd=dict(help='indicate a dir to see', default='',
metavar='dir', nargs='?'),
options={"-a": dict(help='list hidden files', required=False,
default=False,
action='store_true')})
DF = dict(help="display free disk space",
subcmd={},
options={})
MEM = dict(help="show ram usage info",
subcmd=dict(help='{info , dump}; default: info',
default='info',
metavar='action', choices=['info', 'dump'], nargs='?'),
options={})
EXIT = dict(help="exit upydev shell",
subcmd={},
options={"-r": dict(help='soft-reset after exit', required=False,
default=False,
action='store_true'),
"-hr": dict(help='hard-reset after exit', required=False,
default=False,
action='store_true')})
VIM = dict(help="use vim to edit files",
subcmd=dict(help='indicate a file to edit', default='',
metavar='file', nargs='?'),
options={"-rm": dict(help='remove local copy after upload', required=False,
default=False,
action='store_true'),
"-e": dict(help='execute script after upload', required=False,
default=False,
action='store_true'),
"-r": dict(help='reload script so it can run again',
required=False,
default=False,
action='store_true'),
"-o": dict(help='override local copy if present',
required=False,
default=False,
action='store_true'),
"-d": dict(help=('use vim diff between device and local files'
', if same file name device file is ~file'),
required=False,
default=[],
nargs='+')})
DIFF = dict(help=("use git diff between device's [~file/s] and local file/s"),
subcmd=dict(help='indicate files to compare or pattern', default=['*', '*'],
metavar='fileA fileB', nargs='+'),
options={"-s": dict(help='switch file comparison',
required=False,
default=False,
action='store_true')})
RUN = dict(help="run device's scripts",
subcmd=dict(help='indicate a file/script to run', default='',
metavar='file'),
options={"-r": dict(help='reload script so it can run again',
required=False,
default=False,
action='store_true'),
})
RELOAD = dict(help="reload device's scripts",
subcmd=dict(help='indicate a file/script to reload', default='',
metavar='file', nargs=1),
options={})
LCD = dict(help="change local current working directory",
subcmd=dict(help='indicate a dir to change to', default='',
metavar='dir', nargs='?'),
options={})
LSL = dict(help="list local files or directories",
subcmd=dict(help='indicate a file/dir or pattern to see', default=[],
metavar='file/dir/pattern', nargs='*'),
options={"-a": dict(help='list hidden files', required=False,
default=False,
action='store_true')})
LPWD = dict(help="print local current working directory",
subcmd={},
options={})
LDU = dict(help="display local disk usage statistics",
subcmd=dict(help='indicate a dir to see usage', default='',
metavar='dir', nargs='?'),
options={"-d": dict(help='depth level', required=False,
default=0,
type=int)})
INFO = dict(help="prints device's info",
subcmd={},
options={})
ID = dict(help="prints device's unique id",
subcmd={},
options={})
UHELP = dict(help="prints device's help info",
subcmd={},
options={})
MODULES = dict(help="prints device's frozen modules",
subcmd={},
options={})
UPING = dict(help="device send ICMP ECHO_REQUEST packets to network hosts",
subcmd=dict(help='indicate an IP address to ping; default: host IP',
default='host',
metavar='IP', nargs='?'),
options={})
RSSI = dict(help="prints device's RSSI (WiFi or BLE)",
subcmd={},
options={})
NET = dict(help="manage network station interface (STA._IF)",
desc="enable/disable station inteface, config and connect to or scan APs",
subcmd=dict(help='{status, on, off, config, scan}; default: status',
default='status',
metavar='command',
choices=['status', 'on', 'off', 'config', 'scan'],
nargs='?'),
options={"-wp": dict(help='ssid, password for config command',
required=False,
nargs=2)})
IFCONFIG = dict(help="prints network interface configuration (STA._IF)",
subcmd={},
options={"-t": dict(help='print info in table format',
required=False,
default=False,
action='store_true')})
AP = dict(help="manage network acces point interface (AP._IF)",
desc="enable/disable ap inteface, config an AP or scan connected clients",
subcmd=dict(help='{status, on, off, scan, config}; default: status',
default='status',
metavar='command',
choices=['status', 'on', 'off', 'config', 'scan'],
nargs='?'),
options={"-ap": dict(help='ssid, password for config command',
required=False,
nargs=2),
"-t": dict(help='print info in table format',
required=False,
default=False,
action='store_true')})
I2C = dict(help="manage I2C interface",
subcmd=dict(help='{config, scan}; default: config',
default='config',
metavar='action',
choices=['config', 'scan'],
nargs='?'),
options={"-i2c": dict(help='[scl] [sda] for config command',
required=False,
default=[22, 23],
nargs=2)})
SET = dict(help="set device's configuration {rtc, hostname, localname}",
subcmd=dict(help=('set parameter configuration {rtc localtime, rtc ntptime,'
' hostname, localname}; default: rtc localtime'),
default=['rtc'],
metavar='parameter', nargs='+'),
options={"-utc": dict(help='[utc] for "set ntptime" '
'command', required=False, nargs=1, type=int)},
alt_ops=['rtc', 'localtime', 'ntptime', 'hostname', 'localname'])
DATETIME = dict(help="prints device's RTC time",
subcmd={},
options={})
UPTIME = dict(help=("prints device's uptime since latest boot, "
"(requires uptime.py and uptime.settime()"
" at boot.py/main.py)"),
subcmd={},
options={})
CYCLES = dict(help=("prints device's cycle count"
"(requires cycles.py and cycles.set()"
" at boot.py/main.py)"),
subcmd={},
options={})
SHASUM = dict(help="shasum SHA-256 tool",
subcmd=dict(help='Get the hash of a file or check a shasum file',
default=[],
metavar='file/pattern',
nargs='*'),
options={"-c": dict(help='check a shasum file',
required=False,
default='')})
TOUCH = dict(help="create a new file",
subcmd=dict(help='indicate a new file/pattern to create',
default=[],
metavar='file/pattern',
nargs='*'),
options={})
UPIP = dict(help="install or manage MicroPython libs",
subcmd=dict(help='indicate a lib/module to {install, info, find}',
default=[],
metavar='file/pattern',
nargs='*'),
options={},
alt_ops=['install', 'info', 'find'])
TIMEIT = dict(help="measure execution time of a script/function",
subcmd=dict(help='indicate a script/function to measure',
default=[],
metavar='script/function',
nargs='*'),
options={})
UPDATE_UPYUTILS = dict(help="update upyutils scripts",
subcmd=dict(help=("filter to match one/multiple "
"upyutils; default: all"),
default=['*'],
nargs='*',
metavar='name/pattern'),
options={},
alt_ops=os.listdir(os.path.join(UPYDEV_PATH,
'upyutils_dir')))
ENABLE_SHELL = dict(help="upload required files so shell is fully operational",
subcmd={},
options={})
DOCS = dict(help="see upydev docs at https://upydev.readthedocs.io/en/latest/",
subcmd=dict(help='indicate a keyword to search',
metavar='keyword', nargs='?'),
options={})
MDOCS = dict(help="see MicroPython docs at docs.micropython.org",
subcmd=dict(help='indicate a keyword to search',
metavar='keyword', nargs='?'),
options={})
CTIME = dict(help="measure execution time of a shell command",
subcmd=dict(help='indicate a command to measure',
default='info',
choices=shell_commands+custom_sh_cmd_kw,
metavar='command'),
options={})
CONFIG = dict(help="set or check config (from *_config.py files)#",
desc="* needs config module\n* to set config --> [config]: "
"[parameter]=[value]",
subcmd=dict(help='indicate parameter to set or check ',
default=[],
metavar='parameter',
nargs='*'),
options={"-y": dict(help='print config in YAML format',
required=False,
default=False,
action='store_true')})
SD = dict(help="commands to manage an sd",
desc='enable an sd module, mount/unmount an sd or auto mount/unmount sd\n\n'
'* auto command needs SD_AM.py in device',
subcmd=dict(help='actions to mount/unmount sd : {enable, init, deinit, auto}',
default='enable',
choices=['enable', 'init', 'deinit', 'auto'],
metavar='command'),
options={"-po": dict(help='pin of LDO 3.3V regulator to enable',
default=15,
type=int),
"-sck": dict(help='sck pin for sd SPI',
default=5,
type=int),
"-mosi": dict(help='mosi pin for sd SPI',
default=18,
type=int),
"-miso": dict(help='miso pin for sd SPI',
default=19,
type=int),
"-cs": dict(help='cs pin for sd SPI',
default=21,
type=int)})
LOAD = dict(help="run local script in device",
desc="load a local script in device buffer and execute it.",
subcmd=dict(help='indicate a file/script to load', default='',
metavar='file',
nargs='*'),
options={})
SHELL_CMD_DICT_PARSER = {"ls": LS, "head": HEAD, "cat": CAT, "mkdir": MKDIR,
"touch": TOUCH, "cd": CD, "mv": MV, "pwd": PWD,
"rm": RM, "rmdir": RMDIR, "du": DU,
"tree": TREE, "df": DF, "mem": MEM, "exit": EXIT,
"vim": VIM, "run": RUN, "reload": RELOAD,
"info": INFO, "id": ID, "uhelp": UHELP, "modules": MODULES,
"uping": UPING, "rssi": RSSI, "net": NET, "ifconfig": IFCONFIG,
"ap": AP, "i2c": I2C, "set": SET, "datetime": DATETIME,
"shasum": SHASUM, "upip": UPIP, "timeit": TIMEIT,
"update_upyutils": UPDATE_UPYUTILS,
"lcd": LCD,
"lsl": LSL, "lpwd": LPWD, "ldu": LDU, "docs": DOCS,
"mdocs": MDOCS, "ctime": CTIME, "enable_sh": ENABLE_SHELL,
"diff": DIFF, "config": CONFIG, "sd": SD, 'uptime': UPTIME,
"cycles": CYCLES, "load": LOAD}
# DEBUGGING
PING = dict(help="ping the device to test if device is"
" reachable, CTRL-C to stop.",
desc="this sends ICMP ECHO_REQUEST packets to device",
subcmd={},
options={"-t": dict(help="device target address",
required=True),
"-p": dict(help='device password or baudrate',
required=True),
"-zt": dict(help='internal flag for zerotierone device',
required=False,
default=False,
action='store_true')})
PROBE = dict(help="to test if a device is reachable",
desc="ping, scan serial ports or ble scan depending on device type",
subcmd={},
options={"-t": dict(help="device target address",
required=True),
"-p": dict(help='device password or baudrate',
required=True),
"-zt": dict(help='internal flag for zerotierone device',
required=False,
default=False,
action='store_true'),
"-G": dict(help='internal flag for group mode',
required=False,
default=None),
"-gg": dict(help='flag for global group',
required=False,
default=False,
action='store_true'),
"-devs": dict(help='flag for filtering devs in global group',
required=False,
nargs='*')})
SCAN = dict(help="to scan for available devices, use a flag to filter for device type",
desc="\ndefault: if no flag provided will do all three scans.",
subcmd={},
options={"-sr": dict(help="scan for SerialDevice",
required=False,
default=False,
action='store_true'),
"-nt": dict(help='scan for WebSocketDevice',
required=False,
default=False,
action='store_true'),
"-bl": dict(help='scan for BleDevice',
required=False,
default=False,
action='store_true')})
RUN = dict(help="run a script in device, CTRL-C to stop",
desc="this calls 'import [script]' in device and reloads it at the end",
subcmd=dict(help=('indicate a script to run'),
metavar='script'),
options={"-t": dict(help="device target address",
required=True),
"-p": dict(help='device password or baudrate',
required=True),
"-wss": dict(help='use WebSocket Secure',
required=False,
default=False,
action='store_true'),
"-s": dict(help='indicate the path of the script if in external fs'
' e.g. an sd card.',
required=False)})
PLAY = dict(help="play custom tasks in ansible playbook style",
desc="task must be yaml file with name, hosts, tasks, name, command\n"
"structure",
subcmd=dict(help=('indicate a task file to play.'),
metavar='task',
choices=["add", "rm", "list"]),
options={"-t": dict(help="device target address",
required=True),
"-p": dict(help='device password or baudrate',
required=True),
"-wss": dict(help='use WebSocket Secure',
required=False,
default=False,
action='store_true')})
TIMEIT = dict(help="to measure execution time of a module/script",
desc="source: https://github.com/peterhinch/micropython-samples"
"/tree/master/timed_function",
subcmd=dict(help=('indicate a script to run'),
metavar='script'),
options={"-t": dict(help="device target address",
required=True),
"-p": dict(help='device password or baudrate',
required=True),
"-wss": dict(help='use WebSocket Secure',
required=False,
default=False,
action='store_true'),
"-s": dict(help='indicate the path of the script if in external'
' fs e.g. an sd card.',
required=False)})
STREAM_TEST = dict(help="to test download speed (from device to host)",
desc="default: 10 MB of random bytes are sent in chunks of 20 kB "
"and received in chunks of 32 kB.\n\n*(sync_tool.py required)",
subcmd={},
options={"-t": dict(help="device target address",
required=True),
"-p": dict(help='device password or baudrate',
required=True),
"-wss": dict(help='use WebSocket Secure',
required=False,
default=False,
action='store_true'),
"-chunk_tx": dict(help='chunk size of data packets in kB to'
' send',
required=False, default=20, type=int),
"-chunk_rx": dict(help='chunk size of data packets in kB to'
' receive',
required=False, default=32, type=int),
"-total_size": dict(help='total size of data packets in MB',
required=False, default=10, type=int)})
SYSCTL = dict(help="to start/stop a script without following the output",
desc="to follow initiate repl",
mode=dict(help='indicate a mode {start,stop}',
metavar='mode',
choices=['start', 'stop']),
subcmd=dict(help='indicate a script to start/stop',
metavar='script'),
options={"-t": dict(help="device target address",
required=True),
"-p": dict(help='device password or baudrate',
required=True),
"-wss": dict(help='use WebSocket Secure',
required=False,
default=False,
action='store_true')})
LOG = dict(help="to log the output of a script running in device",
desc="log levels (sys.stdout and file), run modes (normal, daemon) are"
"available through following options",
subcmd=dict(help=('indicate a script to run and log'),
metavar='script'),
options={"-t": dict(help="device target address",
required=True),
"-p": dict(help='device password or baudrate',
required=True),
"-wss": dict(help='use WebSocket Secure',
required=False,
default=False,
action='store_true'),
"-s": dict(help='indicate the path of the script if in external fs'
' e.g. an sd card.',
required=False),
"-dflev": dict(help='debug file mode level; default: error',
default='error',
choices=['debug', 'info', 'warning', 'error',
'critical']),
"-dslev": dict(help='debug sys.stdout mode level; default: debug',
default='debug',
choices=['debug', 'info', 'warning', 'error',
'critical']),
"-daemon": dict(help='enable "daemon mode", uses nohup so this '
'means running in background, output if any is'
' redirected to [SCRIPT_NAME]_daemon.log',
default=False, action='store_true'),
"-stopd": dict(help='To stop a log daemon script',
default=False, action='store_true'),
"-F": dict(help='To follow a daemon log script file',
action='store_true',
default=False)})
PYTEST = dict(help="run tests on device with pytest (use pytest setup first)",
subcmd=dict(help='indicate a test script to run, any optional '
'arg is passed to pytest',
default=[''],
metavar='test',
nargs='*'),
options={"-t": dict(help="device target address",
required=True),
"-p": dict(help='device password or baudrate',
required=True),
"-wss": dict(help='use WebSocket Secure',
required=False,
default=False,
action='store_true')})
DB_CMD_DICT_PARSER = {"ping": PING, "probe": PROBE, "scan": SCAN, "run": RUN,
"timeit": TIMEIT, "stream_test": STREAM_TEST, "sysctl": SYSCTL,
"log": LOG, "pytest": PYTEST, "play": PLAY}
# DEVICE MANAGEMENT
CONFIG = dict(help="to save device settings",
desc="this will allow set default device configuration or \n"
"target a specific device in a group.\n"
"\ndefault: a configuration file 'upydev_.config' is saved in\n"
"current working directory, use -[options] for custom configuration",
subcmd={},
options={"-t": dict(help="device target address"),
"-p": dict(help='device password or baudrate'),
"-g": dict(help='save configuration in global path',
required=False,
default=False,
action='store_true'),
"-gg": dict(help='save device configuration in global group',
required=False,
default=False,
action='store_true'),
"-@": dict(help='specify a device name',
required=False),
"-zt": dict(help='zerotierone device configuration',
required=False),
"-sec": dict(help='introduce password with no echo',
required=False,
default=False,
action='store_true')})
CHECK = dict(help='to check device information',
desc='shows current device information or specific device\n'
'indicated with -@ option if it is stored in the global group.',
subcmd={},
options={"-@": dict(help='specify device/s name',
required=False,
nargs='+'),
"-i": dict(help='if device is online/connected gets device info',
required=False,
default=False,
action='store_true'),
"-g": dict(help='looks for configuration in global path',
required=False,
default=False,
action='store_true'),
"-wss": dict(help='use WebSocket Secure',
required=False,
default=False,
action='store_true'),
"-G": dict(help='specify a group, default: global group',
required=False)})
SET = dict(help='to set current device configuration',
subcmd={},
options={"-@": dict(help='specify device name',
required=False),
"-g": dict(help='looks for configuration in global path',
required=False,
default=False,
action='store_true'),
"-G": dict(help='specify a group, default: global group',
required=False)})
REGISTER = dict(help='to register a device/group as a shell function so it is callable',
subcmd=dict(help='alias for device/s or group',
metavar='alias',
nargs='*'),
options={"-@": dict(help='specify device name',
required=False,
nargs='+'),
"-gg": dict(help='register a group of devices',
required=False,
default=False,
action='store_true'),
"-s": dict(help='specify a source file, default: ~/.profile',
required=False),
"-g": dict(help='looks for configuration in global path',
required=False,
default=False,
action='store_true')})
LSDEVS = dict(help='to see registered devices or groups',
desc='this also defines a shell function with the same name in the source'
' file',
subcmd={},
options={"-s": dict(help='specify a source file, default: ~/.profile',
required=False),
"-G": dict(help='specify a group, default: global group',
required=False)})
MKG = dict(help='make a group of devices',
desc='this save a config file with devices settings so they can be targeted'
' all together',
subcmd=dict(help='group name',
metavar='group'),
options={"-g": dict(help='save configuration in global path',
required=False,
default=False,
action='store_true'),
"-devs": dict(help='device configuration [name] [target] '
'[password]',
required=False,
nargs='+')})
GG = dict(help='to see global group of devices',
subcmd={},
options={"-g": dict(help='looks for configuration in global path',
required=False,
default=False,
action='store_true')})
SEE = dict(help='to see a group of devices',
subcmd=dict(help='indicate a group name',
metavar='group'),
options={"-g": dict(help='looks for configuration in global path',
required=False,
default=False,
action='store_true')})
MGG = dict(help='manage a group of devices',
desc='add/remove one or more devices to/from a group',
subcmd=dict(help='group name',
metavar='group',
default='UPY_G',
nargs='?'),
options={"-g": dict(help='looks for configuration in global path',
required=False,
default=False,
action='store_true'),
"-add": dict(help='add device/s name',
required=False,
nargs='*'),
"-rm": dict(help='remove device/s name',
required=False,
nargs='*'),
"-gg": dict(help='manage global group',
required=False,
default=False,
action='store_true')})
MKSG = dict(help='manage a subgroup of devices',
desc='make group from another group with a subset of devices',
subcmd=dict(help='group name',
metavar='group',
default='UPY_G',
nargs='?'),
sgroup=dict(help='subgroup name',
metavar='subgroup'),
options={"-g": dict(help='looks for configuration in global path',
required=False,
default=False,
action='store_true'),
"-devs": dict(help='add device/s name',
required=True,
nargs='*'),
"-gg": dict(help='manage global group',
required=False,
default=False,
action='store_true')})
DM_CMD_DICT_PARSER = {"config": CONFIG, "check": CHECK,
"register": REGISTER, "lsdevs": LSDEVS, "mkg": MKG, "gg": GG,
"see": SEE, "mgg": MGG, "mksg": MKSG}
# FW
MPYX = dict(help="freeze .py files using mpy-cross. (must be available in $PATH)",
subcmd=dict(help='indicate a file/pattern to '
'compile',
default=[],
metavar='file/pattern',
nargs='+'),
options={})
FW = dict(help="list or get available firmware from micropython.org",
subcmd=dict(help=('{list, get, update}'
'; default: list'),
default=['list'],
metavar='action', nargs='*'),
options={"-b": dict(help='to indicate device platform',
required=False),
"-n": dict(help='to indicate keyword for filter search',
required=False),
"-t": dict(help="device target address",
required=True),
"-p": dict(help='device password or baudrate',
required=True),
"-wss": dict(help='use WebSocket Secure',
required=False,
default=False,
action='store_true')},
alt_ops=['list', 'get', 'update', 'latest'])
FLASH = dict(help="to flash a firmware file using available serial tools "
"(esptool.py, pydfu.py)",
subcmd=dict(help=('indicate a firmware file to flash'),
metavar='firmware file'),
options={"-i": dict(help='to check wether device platform and '
'firmware file name match',
required=False,
action='store_true'),
"-t": dict(help="device target address",
required=True),
"-p": dict(help='device baudrate',
required=True),
})
OTA = dict(help="to flash a firmware file using OTA system (ota.py, otable.py)",
subcmd=dict(help=('indicate a firmware file to flash'),
metavar='firmware file'),
options={"-i": dict(help='to check wether device platform and '
'firmware file name match',
required=False,
action='store_true'),
"-sec": dict(help='to enable OTA TLS',
required=False,
default=False,
action='store_true'),
"-t": dict(help="device target address",
required=True),
"-p": dict(help='device password',
required=True),
"-wss": dict(help='use WebSocket Secure',
required=False,
default=False,
action='store_true'),
"-zt": dict(help='zerotierone host IP',
required=False,
default=False)})
FW_CMD_DICT_PARSER = {"mpyx": MPYX, "fwr": FW, "flash": FLASH, "ota": OTA}
# GC
RESET = dict(help="reset device",
subcmd={},
options={"-hr": dict(help='to do hard reset',
required=False,
default=False,
action='store_true')})
CONFIG = dict(help="set or check config (from *_config.py files)#",
desc="to set config --> [config]: [parameter]=[value]",
subcmd=dict(help='indicate parameter to set or check ',
default=[],
metavar='parameter',
nargs='*'),
options={"-y": dict(help='print config in YAML format',
required=False,
default=False,
action='store_true')})
KBI = dict(help="to send KeyboardInterrupt to device",
subcmd={},
options={})
UPYSH = dict(help="import upysh",
subcmd={},
options={})
GC_CMD_DICT_PARSER = {"reset": RESET, "uconfig": CONFIG, "kbi": KBI, "upysh": UPYSH}
# KG
KG = dict(help="to generate a key pair (RSA) or key & certificate (ECDSA) for ssl",
desc="generate key pair and exchange with device, or refresh WebREPL "
"password",
mode=dict(help='indicate a key {rsa, ssl, wr}',
metavar='mode',
choices=['rsa', 'ssl', 'wr'],
nargs='?'),
subcmd=dict(help='- gen: generate a ECDSA key/cert (default)'
'\n- rotate: To rotate CA key/cert pair old->new or'
' new->old'
'\n- add: add a device cert to upydev path verify location.'
'\n- export: export CA or device cert to cwd.',
metavar='subcmd',
# just for arg completion
choices=['gen', 'add', 'export', 'rotate', 'dev', 'host', 'CA',
'status'],
default='gen',
nargs='?'),
dst=dict(help='indicate a subject: {dev, host, CA}, default: dev',
metavar='dest',
choices=['dev', 'host', 'CA'],
default='dev',
nargs='?'),
options={"-t": dict(help="device target address",
required=True),
"-p": dict(help='device password or baudrate',
required=True),
"-wss": dict(help='use WebSocket Secure',
required=False,
default=False,
action='store_true'),
"-zt": dict(help='internal flag for zerotierone device',
required=False,
default=False,
action='store_true'),
"-rst": dict(help='internal flag for reset',
required=False,
default=False,
action='store_true'),
"-key_size": dict(help="RSA key size, default:2048",
default=2048,
required=False,
type=int),
"-show_key": dict(help='show generated RSA key',
required=False,
default=False,
action='store_true'),
"-tfkey": dict(help='transfer keys to device',
required=False,
default=False,
action='store_true'),
"-rkey": dict(help='option to remove private device key from host',
required=False,
default=False,
action='store_true'),
"-g": dict(help='option to store new WebREPL password globally',
required=False,
default=False,
action='store_true'),
"-to": dict(help='serial device name to upload to',
required=False),
"-f": dict(help='cert name to add to verify locations',
required=False),
"-a": dict(
help="show all devs ssl cert status",
required=False,
default=False,
action="store_true",
), })
# Argument spec for the "rsa" command: sign files, verify signatures, or
# run an RSA-challenge authentication against a device, using either the
# device's keys or the host's keys.
RSA = {
    "help": ("to perform operations with RSA key pair as sign, verify or "
             "authenticate"),
    "desc": ("sign files, verify signatures or authenticate devices with "
             "RSA challenge\nusing device keys or host keys"),
    "mode": {
        "help": "indicate an action {sign, verify, auth}",
        "metavar": "mode",
        "choices": ["sign", "verify", "auth"],
    },
    "subcmd": {
        "help": "indicate a file to sign/verify",
        "metavar": "file/signature",
        "nargs": "?",
    },
    "options": {
        "-t": {"help": "device target address", "required": True},
        "-p": {"help": "device password or baudrate", "required": True},
        "-wss": {
            "help": "use WebSocket Secure",
            "required": False,
            "default": False,
            "action": "store_true",
        },
        "-host": {
            "help": "to use host keys",
            "required": False,
            "default": False,
            "action": "store_true",
        },
        "-rst": {
            "help": "internal flag for reset",
            "required": False,
            "default": False,
            "action": "store_true",
        },
    },
}
KG_CMD_DICT_PARSER = {"kg": KG, "rsa": RSA}
# SHELL-REPL
# Argument spec for "shl": open the interactive shell-repl on a device.
SHELLREPLS = {
    "help": "enter shell-repl",
    "subcmd": {},
    "options": {
        "-t": {"help": "device target address", "required": True},
        "-p": {"help": "device password or baudrate", "required": True},
        "-wss": {
            "help": "use WebSocket Secure",
            "required": False,
            "default": False,
            "action": "store_true",
        },
        "-rkey": {
            "help": "generate new password after exit (WebSocketDevices)",
            "required": False,
            "action": "store_true",
        },
        "-nem": {
            "help": "force no encryption mode (WebSocketDevices)",
            "required": False,
            "action": "store_true",
        },
    },
}
# Argument spec for "shl-config": customize the shell prompt color scheme.
# Every option shares the shape {help, required=False, default}, so the
# options mapping is generated from a (flag, help, default) table.
SHELL_CONFIG = {
    "help": "configure shell prompt colors",
    "desc": ("see\nhttps://python-prompt-toolkit.readthedocs.io/en/master/"
             "pages/asking_for_input.html#colors\nfor color options"),
    "subcmd": {},
    "options": {
        flag: {"help": text, "required": False, "default": default}
        for flag, text, default in [
            ("--userpath", "user path color; default: ansimagenta bold",
             "ansimagenta bold"),
            ("--username", "user name color; default: ansigreen bold",
             "ansigreen bold"),
            ("--at", "@ color; default: ansigreen bold", "ansigreen bold"),
            ("--colon", "colon color; default: white", "#ffffff"),
            ("--pound", "pound color; default: ansiblue bold", "ansiblue bold"),
            ("--host", "host color; default: ansigreen bold", "ansigreen bold"),
            ("--path", "path color; default: ansiblue bold", "ansiblue bold"),
        ]
    },
}
# Argument spec for "set_wss": switch a device between WebREPL and
# WebSecREPL (WebSocket Secure).
SET_WSS = {
    "help": "toggle between WebSecREPL and WebREPL",
    "subcmd": {},
    "options": {
        "-t": {"help": "device target address", "required": True},
        "-p": {"help": "device password", "required": True},
        "-wss": {
            "help": "use WebSocket Secure",
            "required": False,
            "default": False,
            "action": "store_true",
        },
    },
}
# Argument spec for "jupyterc": launch the MicroPython upydevice kernel
# in a jupyter console session.
JUPYTER = {
    "help": "MicroPython upydevice kernel for jupyter console, CTRL-D to exit",
    "subcmd": {},
    "options": {},
}
# Shell-REPL command group: interactive shell ("shl"), prompt color
# configuration ("shl-config"), WebREPL/WebSecREPL toggle ("set_wss")
# and the jupyter kernel console ("jupyterc").
SHELLREPL_CMD_DICT_PARSER = {"shl": SHELLREPLS, "shl-config": SHELL_CONFIG,
                             "set_wss": SET_WSS,
                             "jupyterc": JUPYTER}
# REPL
# Argument spec for "rpl": open a plain REPL session on a device.
REPLS = {
    "help": "enter REPL",
    "subcmd": {},
    "options": {
        "-t": {"help": "device target address", "required": True},
        "-p": {"help": "device password or baudrate", "required": True},
        "-wss": {
            "help": "use WebSocket Secure",
            "required": False,
            "default": False,
            "action": "store_true",
        },
        "-rkey": {
            "help": "generate new password after exit (WebSocketDevices)",
            "required": False,
            "action": "store_true",
        },
    },
}

# REPL command group: a single command, "rpl".
REPL_CMD_DICT_PARSER = {"rpl": REPLS}
# FIO
# Argument spec for "put": upload files to the device, optionally into a
# target directory, with an optional soft reset afterwards.
PUT = {
    "help": "upload files to device",
    "subcmd": {
        "help": "indicate a file/pattern/dir to upload",
        "default": [],
        "metavar": "file/pattern/dir",
        "nargs": "+",
    },
    "options": {
        "-dir": {"help": "path to upload to", "required": False, "default": ""},
        "-rst": {
            "help": "to soft reset after upload",
            "required": False,
            "default": False,
            "action": "store_true",
        },
    },
}
# Argument spec for "get": download files from the device.
# Note: "-fg" uses action='store_false' — passing the flag *disables* the
# faster-get method (default True means it is enabled); "-b" sets the
# read-buffer size (bytes) used by that method.
GET = dict(help="download files from device",
           subcmd=dict(help='indicate a file/pattern/dir to '
                            'download',
                       default=[],
                       metavar='file/pattern/dir',
                       nargs='+'),
           options={"-dir": dict(help='path to download from',
                                 required=False,
                                 default=''),
                    # fix: help-text typo "pattrn" -> "pattern"
                    "-d": dict(help='depth level search for pattern',
                               required=False,
                               default=0,
                               type=int),
                    "-fg": dict(help='switch off faster get method',
                                required=False,
                                default=True,
                                action='store_false'),
                    "-b": dict(help='read buffer for faster get method',
                               required=False,
                               default=512,
                               type=int)})
# Argument spec for "dsync": recursive folder sync between host and
# device.  Requires shasum.py on the device (hash-based change detection).
# "-fg" is a store_false flag: passing it disables the faster-get method.
DSYNC = {
    "help": "recursively sync a folder from/to device's filesystem",
    "desc": "* needs shasum.py in device",
    "subcmd": {
        "help": "indicate a dir/pattern to sync",
        "default": ["."],
        "metavar": "dir/pattern",
        "nargs": "*",
    },
    "options": {
        "-rf": {
            "help": ("remove recursive force a dir or file deleted"
                     " in local/device directory"),
            "required": False,
            "default": False,
            "action": "store_true",
        },
        "-d": {"help": "sync from device to host", "required": False,
               "default": False, "action": "store_true"},
        "-fg": {"help": "switch off faster get method", "required": False,
                "default": True, "action": "store_false"},
        "-b": {"help": "read buffer for faster get method",
               "required": False, "default": 512, "type": int},
        "-t": {"help": "show tree of directory to sync", "required": False,
               "default": False, "action": "store_true"},
        "-f": {"help": "force sync, no hash check", "required": False,
               "default": False, "action": "store_true"},
        "-p": {"help": "show diff", "required": False,
               "default": False, "action": "store_true"},
        "-n": {"help": "dry-run and save stash", "required": False,
               "default": False, "action": "store_true"},
        "-i": {"help": "ignore file/dir or pattern", "required": False,
               "default": [], "nargs": "*"},
    },
}
# Argument spec for "update_upyutils": upload the upyutils scripts bundled
# with upydev to the device.  alt_ops (completion candidates) is computed
# at import time from the files shipped in upydev's upyutils_dir.
UPDATE_UPYUTILS = {
    "help": "update upyutils scripts",
    "subcmd": {
        "help": ("filter to match one/multiple "
                 "upyutils; default: all"),
        "default": ["*"],
        "nargs": "*",
        "metavar": "name/pattern",
    },
    "options": {},
    "alt_ops": os.listdir(os.path.join(UPYDEV_PATH, 'upyutils_dir')),
}
# Argument spec for "install": fetch a library/module with upip into ./lib.
INSTALL = {
    "help": "install libraries or modules with upip to ./lib",
    "subcmd": {"help": "indicate a lib/module to install", "metavar": "module"},
    "options": {},
}
# File-I/O command group: upload ("put"), download ("get"), folder sync
# ("dsync"), bundled-script refresh ("update_upyutils") and upip install.
FIO_CMD_DICT_PARSER = {"put": PUT, "get": GET, "dsync": DSYNC,
                       "update_upyutils": UPDATE_UPYUTILS, "install": INSTALL}
# Merge every command group into one name -> spec mapping, used by the
# completer helpers below.  Groups are merged in the original order, so
# on a (not expected) key collision the later group wins.
ALL_PARSER = {}
for _group in (SHELL_CMD_DICT_PARSER, DB_CMD_DICT_PARSER, DM_CMD_DICT_PARSER,
               FW_CMD_DICT_PARSER, GC_CMD_DICT_PARSER, KG_CMD_DICT_PARSER,
               SHELLREPL_CMD_DICT_PARSER, REPL_CMD_DICT_PARSER,
               FIO_CMD_DICT_PARSER):
    ALL_PARSER.update(_group)
def argopts_complete(option, parser_map=None):
    """Return the completion candidates for a command name.

    Candidates are collected, in order, from the command spec's subcmd
    choices, mode choices, alternative operands (``alt_ops``) and option
    flags.

    Args:
        option: command name to look up (e.g. ``'put'``).
        parser_map: mapping of command name -> spec dict; defaults to the
            module-level ``ALL_PARSER``.  Injectable for testing.

    Returns:
        list of candidate strings, or ``[]`` for an unknown command.
    """
    if parser_map is None:
        parser_map = ALL_PARSER
    spec = parser_map.get(option)
    if spec is None:
        # Unknown command: nothing to complete.
        return []
    opt_args = []
    # .get() instead of direct indexing so a spec without a 'subcmd'/'mode'
    # entry cannot raise.
    subcmd = spec.get('subcmd')
    if subcmd:
        choices = subcmd.get('choices')
        if choices:
            opt_args += choices
    mode = spec.get('mode')
    if mode:
        choices = mode.get('choices')
        if choices:
            opt_args += choices
    alt_ops = spec.get('alt_ops')
    if alt_ops:
        opt_args += alt_ops
    kw_args = spec.get('options')
    if kw_args:
        opt_args += list(kw_args.keys())
    return opt_args
def get_opts_dict(option, parser_map=None):
    """Return the option-flag spec dict for a command.

    Args:
        option: command name to look up; a ``KeyError`` propagates for an
            unknown command (same as the original behavior).
        parser_map: mapping of command name -> spec dict; defaults to the
            module-level ``ALL_PARSER``.  Injectable for testing.

    Returns:
        the ``'options'`` mapping of the command spec, or ``{}`` when the
        spec has no (or an empty/None) ``'options'`` entry.
    """
    if parser_map is None:
        parser_map = ALL_PARSER
    # `or {}` normalizes both a missing key and an explicit None/empty value.
    return parser_map[option].get('options') or {}
| [
"carlosgilglez@gmail.com"
] | carlosgilglez@gmail.com |
ac2cbb0b731b97e581da7a9f035b4ce7209d5dbf | f08336ac8b6f8040f6b2d85d0619d1a9923c9bdf | /223-rectangleArea.py | b77b9c32e8858d4b5b81adab6076c7a69ecfadeb | [] | no_license | MarshalLeeeeee/myLeetCodes | fafadcc35eef44f431a008c1be42b1188e7dd852 | 80e78b153ad2bdfb52070ba75b166a4237847d75 | refs/heads/master | 2020-04-08T16:07:47.943000 | 2019-02-21T01:43:16 | 2019-02-21T01:43:16 | 159,505,231 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 975 | py | '''
223.Rectangle Area
Find the total area covered by two rectilinear rectangles in a 2D plane.
Each rectangle is defined by its bottom left corner and top right corner as shown in the figure.
Example:
Input: A = -3, B = 0, C = 3, D = 4, E = 0, F = -1, G = 9, H = 2
Output: 45
Note:
Assume that the total area is never beyond the maximum possible value of int.
'''
class Solution:
    def computeArea(self, A, B, C, D, E, F, G, H):
        """Return the total area covered by two axis-aligned rectangles.

        Rectangle 1 is (A, B)-(C, D) and rectangle 2 is (E, F)-(G, H),
        each given as bottom-left and top-right corners; any shared
        region is counted only once.
        """
        # Tag every x (resp. y) coordinate with the rectangle it belongs
        # to (0 or 1) and sort.  The projections onto an axis overlap
        # exactly when the two smallest tagged values come from different
        # rectangles; in that case the middle two values bound the shared
        # interval on that axis.
        xs = sorted([(A, 0), (C, 0), (E, 1), (G, 1)])
        ys = sorted([(B, 0), (D, 0), (F, 1), (H, 1)])
        x_interleaved = xs[0][1] != xs[1][1]
        y_interleaved = ys[0][1] != ys[1][1]
        if x_interleaved and y_interleaved:
            shared = (xs[2][0] - xs[1][0]) * (ys[2][0] - ys[1][0])
        else:
            shared = 0
        # Sum of both areas minus the doubly-counted intersection.
        return (C - A) * (D - B) + (G - E) * (H - F) - shared
| [
"marshallee413lmc@sina.com"
] | marshallee413lmc@sina.com |
End of preview. Expand
in Data Studio
README.md exists but content is empty.
- Downloads last month
- 57