hexsha
stringlengths 40
40
| size
int64 7
1.04M
| ext
stringclasses 10
values | lang
stringclasses 1
value | max_stars_repo_path
stringlengths 4
247
| max_stars_repo_name
stringlengths 4
125
| max_stars_repo_head_hexsha
stringlengths 40
78
| max_stars_repo_licenses
sequencelengths 1
10
| max_stars_count
int64 1
368k
⌀ | max_stars_repo_stars_event_min_datetime
stringlengths 24
24
⌀ | max_stars_repo_stars_event_max_datetime
stringlengths 24
24
⌀ | max_issues_repo_path
stringlengths 4
247
| max_issues_repo_name
stringlengths 4
125
| max_issues_repo_head_hexsha
stringlengths 40
78
| max_issues_repo_licenses
sequencelengths 1
10
| max_issues_count
int64 1
116k
⌀ | max_issues_repo_issues_event_min_datetime
stringlengths 24
24
⌀ | max_issues_repo_issues_event_max_datetime
stringlengths 24
24
⌀ | max_forks_repo_path
stringlengths 4
247
| max_forks_repo_name
stringlengths 4
125
| max_forks_repo_head_hexsha
stringlengths 40
78
| max_forks_repo_licenses
sequencelengths 1
10
| max_forks_count
int64 1
105k
⌀ | max_forks_repo_forks_event_min_datetime
stringlengths 24
24
⌀ | max_forks_repo_forks_event_max_datetime
stringlengths 24
24
⌀ | content
stringlengths 1
1.04M
| avg_line_length
float64 1.77
618k
| max_line_length
int64 1
1.02M
| alphanum_fraction
float64 0
1
| original_content
stringlengths 7
1.04M
| filtered:remove_function_no_docstring
int64 -102
942k
| filtered:remove_class_no_docstring
int64 -354
977k
| filtered:remove_delete_markers
int64 0
60.1k
|
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
e7e9653d546ade6c8ce9b53c49b25b1b21568a5c | 5,267 | py | Python | VisualGimp/Markup.py | duangsuse/VisualGimp | 79776fded12595ab3c56855b5ae56e2242780b2e | [
"MIT"
] | 2 | 2019-05-07T12:09:11.000Z | 2019-05-08T09:31:44.000Z | VisualGimp/Markup.py | duangsuse-valid-projects/VisualGimp | 79776fded12595ab3c56855b5ae56e2242780b2e | [
"MIT"
] | null | null | null | VisualGimp/Markup.py | duangsuse-valid-projects/VisualGimp | 79776fded12595ab3c56855b5ae56e2242780b2e | [
"MIT"
] | null | null | null | #!/usr/bin/env python2
# -*- encoding: utf-8 -*-
# Gimp Markup Builder
# author: duangsuse
# date: Thu May 02 2019 CST
from os import linesep
from Util import stream_join
class MarkupBuilder:
''' Gimp Markup SGML builder '''
'''
Indent rules:
when starting new tag, write last spaces, last spaces += indent
if new tag is not text tag start (inner is just text), write newline
when leaving tag, last spaces -= indent
'''
indented = property(useindent)
def wnewline(self):
''' see use_indent'''
self.marks += self.nl
def windent(self):
''' see use_indent'''
wrote = 0
for _ in range(0, self.last_spaces):
self.marks += ' '
wrote += 1 # dummy?
return wrote
def cancel_indent(self):
''' cancel last indent '''
if self.indented: self.marks = self.marks[:-self.revert_last_indent_size]
def do_indent(self, entering = True):
''' Write indent, increase last_spaces, saving wrote spaces and newline to revert_last_indent_size '''
if self.indented: do()
def do_last_indent(self, *args, **kwargs):
''' write indenting for last block '''
self.last_spaces -= self.indent
self.do_indent(*args, **kwargs)
self.last_spaces += self.indent
def begin(self, tag, attrs = {}):
'''
Make a tag with name and attributes
Attribute name, value and tag name is escaped
'''
self.last_is_text = False
attrst = str()
tagscape = self.escape(tag)
ary = list(stream_join(attrs.keys(), attrs.values())) if attrs.__class__ is dict else list(attrs)
if len(attrs) != 0:
for n in range(0, len(ary), 2):
attrst += self.escape(str(ary[n]))
attrst += '='
#print(ary)
#print(n)
attrst += "\"%s\"" % self.escape(str(ary[n+1]))
self.marks += '<' + tagscape
if len(attrs) != 0: self.marks += ' '
self.marks += attrst + '>'
# always write indents for next line
# makes its possible to drop last indent (text tag)
self.do_indent()
self.tag_stack.append(tagscape)
return self
def tag(self, *args, **kwargs):
r'''
EDSL using __close__ with syntax
create nodes like:
with xml.tag('span', {color: '#66ccff'}):
xml % 'Q \w\ Q'
'''
self.last_is_text = False
return TagBuilder(self)
def text(self, content):
''' append text content '''
self.last_is_text = True
if self.indented: self.cancel_indent()
self.marks += self.escape(content)
return self
#@staticmethod
#def test():
# m = MarkupBuilder()
# m > 'html'
# m > 'head'
# m > 'title'
# m < 'Hello World'
# m <= 2
# m > 'body'
# m > 'text'
# with m.tag("b"):
# m < 'String'
# m >= ['a', {'id': 'str'}]
# m < '|sg.'
# m <= 4
# return m
def end(self):
''' delimites last tag '''
if not self.last_is_text: # cancel indentation
#print(self.indent, self.tag_stack)
self.cancel_indent()
self.do_indent(False)
self.marks += '</' + self.tag_stack.pop() + '>'
self.do_indent(False)
self.last_is_text = False
# Not cared by Markup indent emitter
def raw(self, raw):
''' write raw text (unescaped) '''
self.marks += raw
return self
def rawtag(self, rawtext):
''' append unescaped raw <> text '''
self.marks += '<'
self.marks += rawtext
self.marks += '>'
def _escape(self, xml):
'''
Escape XML string
' is replaced with '
" is replaced with "
& is replaced with &
< is replaced with <
> is replaced with >
'''
escapes = frozenset("'\"&<>")
replacement = { '\'': 'apos', '"': 'quot', '&': 'amp', '<': 'lt', '>': 'gt' }
if len(xml) < 1: return
output = str()
for i in range(0, len(xml)):
char = xml[i]
if (char in escapes):
output += '&'
output += replacement[char]
output += ';'
else: output += char
return output
escape = classmethod(_escape)
def __str__(self):
''' M(marks)..[tag stack] '''
return 'M(' + self.marks + ')..' + str(self.tag_stack)
__lt__ = text # chain
__gt__ = begin # chain
__add__ = raw # chain
def __contains__(self, tag):
''' is tag inside enclosing tags ? '''
return tag in self.tag_stack
def __ge__(self, tag_attr):
''' xml >= ['markup', {'name': 'abcs'}] '''
mark = tag_attr[0]
attr = tag_attr[1]
self.begin(mark, attr)
def __le__(self, n = 1):
''' Leave (close) N tags '''
while n > 0:
self.end()
n -= 1
| 24.962085 | 106 | 0.584204 | #!/usr/bin/env python2
# -*- encoding: utf-8 -*-
# Gimp Markup Builder
# author: duangsuse
# date: Thu May 02 2019 CST
from os import linesep
from Util import stream_join
class MarkupBuilder:
    ''' Gimp Markup SGML builder '''

    def __init__(self, indent = -1, nl = linesep, buffer = str):
        # indent == -1 disables indentation entirely (see useindent).
        self.marks = buffer()                 # accumulated markup output
        self.tag_stack = list()               # currently-open tag names
        self.nl = nl                          # newline sequence to emit
        self.indent = indent                  # spaces added per nesting level
        self.last_spaces = 0                  # current indent width in spaces
        self.revert_last_indent_size = 0      # chars to strip to undo last indent
        self.last_is_text = False             # True when last write was text()

    '''
    Indent rules:
    when starting new tag, write last spaces, last spaces += indent
    if new tag is not text tag start (inner is just text), write newline
    when leaving tag, last spaces -= indent
    '''
    def useindent(self): return self.indent != -1
    indented = property(useindent)

    def wnewline(self):
        ''' see use_indent'''
        self.marks += self.nl

    def windent(self):
        ''' see use_indent'''
        # Emit self.last_spaces spaces; returns how many were written.
        wrote = 0
        for _ in range(0, self.last_spaces):
            self.marks += ' '
            wrote += 1 # dummy?
        return wrote

    def cancel_indent(self):
        ''' cancel last indent '''
        # Strips the newline + spaces written by the previous do_indent().
        if self.indented: self.marks = self.marks[:-self.revert_last_indent_size]

    def do_indent(self, entering = True):
        ''' Write indent, increase last_spaces, saving wrote spaces and newline to revert_last_indent_size '''
        def do():
            self.wnewline()
            if (entering):
                self.last_spaces += self.indent
            else: self.last_spaces -= self.indent
            # +1 accounts for the newline written by wnewline().
            self.revert_last_indent_size = self.windent() +1
        if self.indented: do()

    def do_last_indent(self, *args, **kwargs):
        ''' write indenting for last block '''
        # Temporarily steps one level out, indents, then restores the level.
        self.last_spaces -= self.indent
        self.do_indent(*args, **kwargs)
        self.last_spaces += self.indent

    def begin(self, tag, attrs = {}):
        '''
        Make a tag with name and attributes

        Attribute name, value and tag name is escaped
        '''
        # NOTE(review): `attrs = {}` is a mutable default; it is only read
        # here, never mutated, so it is harmless in practice.
        self.last_is_text = False
        attrst = str()
        tagscape = self.escape(tag)
        # Flatten a dict into [k1, v1, k2, v2, ...]; non-dicts are assumed
        # to already be such a flat alternating sequence.
        ary = list(stream_join(attrs.keys(), attrs.values())) if attrs.__class__ is dict else list(attrs)
        if len(attrs) != 0:
            for n in range(0, len(ary), 2):
                attrst += self.escape(str(ary[n]))
                attrst += '='
                #print(ary)
                #print(n)
                attrst += "\"%s\"" % self.escape(str(ary[n+1]))
        self.marks += '<' + tagscape
        if len(attrs) != 0: self.marks += ' '
        self.marks += attrst + '>'
        # always write indents for next line
        # makes its possible to drop last indent (text tag)
        self.do_indent()
        self.tag_stack.append(tagscape)
        return self

    def make(self): return self.marks

    def tag(self, *args, **kwargs):
        r'''
        EDSL using __close__ with syntax
        create nodes like:
        with xml.tag('span', {color: '#66ccff'}):
            xml % 'Q \w\ Q'
        '''
        self.last_is_text = False
        # Context manager that opens the tag on enter and closes it on exit.
        class TagBuilder:
            def __init__(self, xml):
                self.xml = xml
            def __enter__(self):
                self.xml.begin(*args, attrs = kwargs)
            def __exit__(self, *lveinfo):
                self.xml.end()
        return TagBuilder(self)

    def text(self, content):
        ''' append text content '''
        self.last_is_text = True
        # Text hugs its enclosing tag: drop the indent begin() just wrote.
        if self.indented: self.cancel_indent()
        self.marks += self.escape(content)
        return self

    #@staticmethod
    #def test():
    #  m = MarkupBuilder()
    #  m > 'html'
    #  m > 'head'
    #  m > 'title'
    #  m < 'Hello World'
    #  m <= 2
    #  m > 'body'
    #  m > 'text'
    #  with m.tag("b"):
    #    m < 'String'
    #  m >= ['a', {'id': 'str'}]
    #  m < '|sg.'
    #  m <= 4
    #  return m

    def end(self):
        ''' delimites last tag '''
        if not self.last_is_text: # cancel indentation
            #print(self.indent, self.tag_stack)
            self.cancel_indent()
            self.do_indent(False)
        self.marks += '</' + self.tag_stack.pop() + '>'
        self.do_indent(False)
        self.last_is_text = False

    # Not cared by Markup indent emitter
    def raw(self, raw):
        ''' write raw text (unescaped) '''
        self.marks += raw
        return self

    def rawtag(self, rawtext):
        ''' append unescaped raw <> text '''
        self.marks += '<'
        self.marks += rawtext
        self.marks += '>'

    def _escape(self, xml):
        '''
        Escape XML string

        ' is replaced with '
        " is replaced with "
        & is replaced with &
        < is replaced with <
        > is replaced with >
        '''
        # NOTE(review): exposed below as a classmethod, so `self` receives
        # the class and `xml` is the string being escaped.
        escapes = frozenset("'\"&<>")
        replacement = { '\'': 'apos', '"': 'quot', '&': 'amp', '<': 'lt', '>': 'gt' }
        # Empty input returns None rather than '' — callers appear to rely
        # only on non-empty inputs here.
        if len(xml) < 1: return
        output = str()
        for i in range(0, len(xml)):
            char = xml[i]
            if (char in escapes):
                output += '&'
                output += replacement[char]
                output += ';'
            else: output += char
        return output
    escape = classmethod(_escape)

    def __str__(self):
        ''' M(marks)..[tag stack] '''
        return 'M(' + self.marks + ')..' + str(self.tag_stack)

    # Operator aliases for the chaining EDSL.
    __lt__ = text # chain
    __gt__ = begin # chain
    __add__ = raw # chain

    def __contains__(self, tag):
        ''' is tag inside enclosing tags ? '''
        return tag in self.tag_stack

    def __ge__(self, tag_attr):
        ''' xml >= ['markup', {'name': 'abcs'}] '''
        mark = tag_attr[0]
        attr = tag_attr[1]
        self.begin(mark, attr)

    def __le__(self, n = 1):
        ''' Leave (close) N tags '''
        while n > 0:
            self.end()
            n -= 1
| 538 | -4 | 209 |
8796a12ade2e6974f6dfc98adc77e755604d7da8 | 895 | py | Python | sqlalchemy_redshift/__init__.py | Hivestack/sqlalchemy-redshift | 6226ffe4c6f3583606016492641e1bd5d351933a | [
"MIT"
] | null | null | null | sqlalchemy_redshift/__init__.py | Hivestack/sqlalchemy-redshift | 6226ffe4c6f3583606016492641e1bd5d351933a | [
"MIT"
] | null | null | null | sqlalchemy_redshift/__init__.py | Hivestack/sqlalchemy-redshift | 6226ffe4c6f3583606016492641e1bd5d351933a | [
"MIT"
] | null | null | null | from pkg_resources import DistributionNotFound, get_distribution, parse_version
# Fail fast with an actionable message when no psycopg2 driver is installed.
try:
    import psycopg2  # noqa: F401
except ImportError:
    raise ImportError(
        'No module named psycopg2. Please install either '
        'psycopg2 or psycopg2-binary package for CPython '
        'or psycopg2cffi for Pypy.'
    )

# Enforce a minimum driver version. Only the variant that is actually
# installed is checked; absent ones raise DistributionNotFound and are
# skipped, and the loop stops at the first installed variant.
for package in ['psycopg2', 'psycopg2-binary', 'psycopg2cffi']:
    try:
        if get_distribution(package).parsed_version < parse_version('2.5'):
            raise ImportError('Minimum required version for psycopg2 is 2.5')
        break
    except DistributionNotFound:
        pass

__version__ = get_distribution('hs-sqlalchemy-redshift').version

from sqlalchemy.dialects import registry

# Register the dialect for both "redshift://" and "redshift+psycopg2://" URLs.
registry.register("redshift", "sqlalchemy_redshift.dialect", "RedshiftDialect")
registry.register(
    "redshift.psycopg2", "sqlalchemy_redshift.dialect", "RedshiftDialect"
)
| 31.964286 | 79 | 0.727374 | from pkg_resources import DistributionNotFound, get_distribution, parse_version
# Fail fast with an actionable message when no psycopg2 driver is installed.
try:
    import psycopg2  # noqa: F401
except ImportError:
    raise ImportError(
        'No module named psycopg2. Please install either '
        'psycopg2 or psycopg2-binary package for CPython '
        'or psycopg2cffi for Pypy.'
    )

# Enforce a minimum driver version. Only the variant that is actually
# installed is checked; absent ones raise DistributionNotFound and are
# skipped, and the loop stops at the first installed variant.
for package in ['psycopg2', 'psycopg2-binary', 'psycopg2cffi']:
    try:
        if get_distribution(package).parsed_version < parse_version('2.5'):
            raise ImportError('Minimum required version for psycopg2 is 2.5')
        break
    except DistributionNotFound:
        pass

__version__ = get_distribution('hs-sqlalchemy-redshift').version

from sqlalchemy.dialects import registry

# Register the dialect for both "redshift://" and "redshift+psycopg2://" URLs.
registry.register("redshift", "sqlalchemy_redshift.dialect", "RedshiftDialect")
registry.register(
    "redshift.psycopg2", "sqlalchemy_redshift.dialect", "RedshiftDialect"
)
| 0 | 0 | 0 |
fdbf1c941811766f3c215aa9700b09effe98e5e6 | 134 | py | Python | ch2/chapter2_features_of_fastapi_02.py | PacktPublishing/Understanding-How-Web-APIs-Work | 63220e7bf6b31315c46650e45c670ca9a01011fc | [
"MIT"
] | 2 | 2021-10-03T09:34:34.000Z | 2021-10-04T04:52:48.000Z | ch2/chapter2_features_of_fastapi_02.py | PacktPublishing/Understanding-How-Web-APIs-Work | 63220e7bf6b31315c46650e45c670ca9a01011fc | [
"MIT"
] | 1 | 2021-04-25T05:57:34.000Z | 2021-04-25T14:49:24.000Z | ch2/chapter2_features_of_fastapi_02.py | PacktPublishing/Understanding-How-Web-APIs-Work | 63220e7bf6b31315c46650e45c670ca9a01011fc | [
"MIT"
] | 3 | 2021-05-13T09:39:27.000Z | 2021-06-29T05:51:46.000Z | # -*- coding: utf-8 -*-
| 33.5 | 57 | 0.58209 | # -*- coding: utf-8 -*-
def message(age: int = 0, name: str = 'stranger') -> str:
    """Build a short greeting naming *name* and stating *age* in years."""
    greeting = 'Hello {}, you are {} years old'.format(name, age)
    return greeting
| 88 | 0 | 22 |
515654029ae48e70e4487c739d107ea440403f1d | 8,124 | py | Python | Lib/site-packages/hackedit/app/templates.py | fochoao/cpython | 3dc84b260e5bced65ebc2c45c40c8fa65f9b5aa9 | [
"bzip2-1.0.6",
"0BSD"
] | null | null | null | Lib/site-packages/hackedit/app/templates.py | fochoao/cpython | 3dc84b260e5bced65ebc2c45c40c8fa65f9b5aa9 | [
"bzip2-1.0.6",
"0BSD"
] | 20 | 2021-05-03T18:02:23.000Z | 2022-03-12T12:01:04.000Z | Lib/site-packages/hackedit/app/templates.py | fochoao/cpython | 3dc84b260e5bced65ebc2c45c40c8fa65f9b5aa9 | [
"bzip2-1.0.6",
"0BSD"
] | null | null | null | """
This module contains the top level API for managing the project/file templates.
"""
import json
import logging
import os
import re
from binaryornot.check import is_binary
from hackedit.app import settings
def create(template, dest_dir, answers):
"""
Creates a file/project from the specified template, at the specified directory.
:param template: Template data.
:param dest_dir: Destination directory where to create the file/project
:param answers: Dict of answers for substitution variables
"""
ret_val = []
if not os.path.exists(dest_dir):
os.makedirs(dest_dir)
src_dir = template['path']
for root, dirs, files in os.walk(src_dir):
for file in files:
if file == 'template.json' or file.endswith('.pyc'):
continue
src, dst = get_paths(root, file, src_dir, dest_dir)
dst = subsitute_vars(dst)
encoding = get_file_encoding(src)
try:
content = open_file(src, encoding)
except OSError:
_logger().exception('failed to open file: %r', src)
if encoding != 'binary':
content = subsitute_vars(content)
if file == 'btpad_btn_img_0.png':
print(len(content), encoding)
try:
open_file(dst, encoding, to_write=content)
except PermissionError:
_logger().exception('failed to write file: %r', dst)
else:
ret_val.append(dst)
assert open_file(dst, encoding) == content
for directory in dirs:
src, dst = get_paths(root, directory, src_dir, dest_dir)
dst = subsitute_vars(dst)
try:
os.mkdir(dst)
except PermissionError:
_logger().exception('failed to create directory: %r', dst)
return ret_val
def get_sources():
    """
    Return the configured template sources, sorted by label.

    Each source is a dict with ``'label'`` and ``'path'`` keys, persisted
    as a JSON list under the ``_templates/sources`` settings key.
    """
    stored = settings.load().value('_templates/sources', '[]')
    return sorted(json.loads(stored), key=lambda source: source['label'])
def add_source(label, path):
    """
    Register a new template source.

    :param label: Display name for the source.
    :param path: Directory that holds the source's template folders.
    """
    sources = get_sources() + [{'label': label, 'path': path}]
    store = settings.load()
    store.setValue('_templates/sources', json.dumps(sources))
def rm_source(label):
    """
    Remove every template source whose label matches *label*.

    :param label: Name of the template source to remove.
    """
    # BUGFIX: the previous implementation called list.remove() while
    # iterating the same list, which skips the element that follows each
    # removal. Building the surviving list avoids that.
    tmpl_sources = [src for src in get_sources() if src['label'] != label]
    s = settings.load()
    s.setValue('_templates/sources', json.dumps(tmpl_sources))
def clear_sources():
    """Delete every registered template source."""
    store = settings.load()
    store.setValue('_templates/sources', json.dumps([]))
def get_templates(category='', source_filter=''):
"""
Gets the list of templates.
:param category: Template category to retrieve.
- use "Project" to get project templates
- use "File" to get file templates
- use an empty string to retrieve them all (default).
:param source: Label of the source of the templates to retrieve. Use an empty string to retrieve
templates from all sources.
"""
def filtered_sources():
"""
Filter list of sources based on the ``source`` parameter.
"""
tmpl_sources = get_sources()
filtered = []
if source_filter:
# only keep the requested template source
for src in tmpl_sources:
if src['label'] == source_filter:
filtered.append(src)
break
else:
filtered = tmpl_sources
return filtered
def get_template(tdir):
"""
Returns template data for the given template directory.
Returns None if the template is invalid.
:param tdir: Template directory to get data from.
"""
tmpl = None
template_json = os.path.join(tdir, 'template.json')
if not os.path.exists(template_json):
# no template.json -> invalid template
_logger().warn('"template.json" not found in template directory: %r', tdir)
else:
try:
with open(template_json) as f:
tmpl = json.loads(f.read())
except (OSError, json.JSONDecodeError):
# unreadable template.json -> invalid template
_logger().exception('failed to read %r', template_json)
tmpl = None
else:
try:
tmpl_cat = tmpl['category']
except KeyError:
# no metadata or no category in template.json -> invalid template
_logger().exception('failed to read category from template metadata, '
'incomplete template.json?')
tmpl = None
else:
# valid template (finally).
tmpl['source'] = src
if category and category != tmpl_cat:
_logger().debug('rejecting template directory: %r, invalid category', tdir)
tmpl = None
return tmpl
def listdir(directory):
"""
Securely list subdirectories of ``directory``.
Returns an empty list of an OSError occurred.
"""
try:
return os.listdir(directory)
except OSError:
return []
for src in filtered_sources():
for tdir in listdir(src['path']):
tdir = os.path.join(src['path'], tdir)
if os.path.isfile(tdir):
continue
tmpl = get_template(tdir)
if tmpl:
tmpl['path'] = tdir
yield tmpl
def get_template(source, template):
    """
    Look up a single template by source label and template name.

    Returns the template dict, or ``None`` when no match exists.
    """
    matches = (t for t in get_templates(source_filter=source)
               if t['name'] == template)
    return next(matches, None)
if __name__ == '__main__':
clear_sources()
add_source('COBOL', '/home/colin/Documents/hackedit-cobol/hackedit_cobol/templates')
add_source('Python', '/home/colin/Documents/hackedit-python/hackedit_python/templates')
for tmpl in get_templates():
print(json.dumps(tmpl, indent=4, sort_keys=True))
| 31.126437 | 100 | 0.563269 | """
This module contains the top level API for managing the project/file templates.
"""
import json
import logging
import os
import re
from binaryornot.check import is_binary
from hackedit.app import settings
def create(template, dest_dir, answers):
    """
    Creates a file/project from the specified template, at the specified directory.

    :param template: Template data (dict with at least a ``'path'`` key and,
        optionally, an ``'encodings'`` list of candidate text encodings).
    :param dest_dir: Destination directory where to create the file/project.
    :param answers: Dict of answers for substitution variables; every
        ``@name@`` marker in destination paths and text content is replaced.
    :return: List of destination paths of the files that were written.
    """
    def get_paths(root, path, src_dir, dest_dir):
        # Map a source entry to its mirrored path under dest_dir.
        src_path = os.path.join(root, path)
        rel_path = os.path.relpath(src_path, src_dir)
        dst_path = os.path.join(dest_dir, rel_path)
        return src_path, dst_path

    def get_file_encoding(path):
        # 'binary' for non-text files, otherwise the first candidate
        # encoding that decodes the whole file (None when none succeeds).
        if is_binary(path):
            return 'binary'
        try:
            encodings = template['encodings']
        except KeyError:
            encodings = ['utf-8', 'cp1252']
        for encoding in encodings:
            try:
                with open(path, encoding=encoding) as f:
                    f.read()
            except UnicodeDecodeError:
                continue
            else:
                return encoding

    def open_file(path, encoding, to_write=None):
        # Read (to_write is None) or write a file, in binary or text mode.
        if encoding == 'binary':
            mode = 'rb' if to_write is None else 'wb'
            encoding = None
        else:
            mode = 'r' if to_write is None else 'w'
        content = None
        with open(path, mode, encoding=encoding) as f:
            if to_write is not None:
                f.write(to_write)
            else:
                content = f.read()
        return content

    def substitute_vars(string):
        # Replace every @var@ marker with its answer.
        for var, value in answers.items():
            string = re.sub('@%s@' % var, value, string)
        return string

    ret_val = []
    if not os.path.exists(dest_dir):
        os.makedirs(dest_dir)
    src_dir = template['path']
    for root, dirs, files in os.walk(src_dir):
        for file in files:
            if file == 'template.json' or file.endswith('.pyc'):
                continue  # template metadata and caches are never copied
            src, dst = get_paths(root, file, src_dir, dest_dir)
            dst = substitute_vars(dst)
            encoding = get_file_encoding(src)
            try:
                content = open_file(src, encoding)
            except OSError:
                _logger().exception('failed to open file: %r', src)
                # BUGFIX: skip this file instead of falling through and
                # writing stale (or undefined) `content` from a prior pass.
                continue
            if encoding != 'binary':
                content = substitute_vars(content)
            try:
                open_file(dst, encoding, to_write=content)
            except PermissionError:
                _logger().exception('failed to write file: %r', dst)
            else:
                ret_val.append(dst)
                assert open_file(dst, encoding) == content
        for directory in dirs:
            src, dst = get_paths(root, directory, src_dir, dest_dir)
            dst = substitute_vars(dst)
            try:
                os.mkdir(dst)
            except PermissionError:
                _logger().exception('failed to create directory: %r', dst)
    return ret_val
def get_sources():
    """
    Return the configured template sources, sorted by label.

    Each source is a dict with ``'label'`` and ``'path'`` keys, persisted
    as a JSON list under the ``_templates/sources`` settings key.
    """
    stored = settings.load().value('_templates/sources', '[]')
    return sorted(json.loads(stored), key=lambda source: source['label'])
def add_source(label, path):
    """
    Register a new template source.

    :param label: Display name for the source.
    :param path: Directory that holds the source's template folders.
    """
    sources = get_sources() + [{'label': label, 'path': path}]
    store = settings.load()
    store.setValue('_templates/sources', json.dumps(sources))
def rm_source(label):
    """
    Remove every template source whose label matches *label*.

    :param label: Name of the template source to remove.
    """
    # BUGFIX: the previous implementation called list.remove() while
    # iterating the same list, which skips the element that follows each
    # removal. Building the surviving list avoids that.
    tmpl_sources = [src for src in get_sources() if src['label'] != label]
    s = settings.load()
    s.setValue('_templates/sources', json.dumps(tmpl_sources))
def clear_sources():
    """Delete every registered template source."""
    store = settings.load()
    store.setValue('_templates/sources', json.dumps([]))
def get_templates(category='', source_filter=''):
    """
    Gets the list of templates (lazily, as a generator).

    :param category: Template category to retrieve.
        - use "Project" to get project templates
        - use "File" to get file templates
        - use an empty string to retrieve them all (default).
    :param source_filter: Label of the source of the templates to retrieve.
        Use an empty string to retrieve templates from all sources.
    """
    def filtered_sources():
        """
        Filter list of sources based on the ``source_filter`` parameter.
        """
        tmpl_sources = get_sources()
        filtered = []
        if source_filter:
            # only keep the requested template source
            for src in tmpl_sources:
                if src['label'] == source_filter:
                    filtered.append(src)
                    break
        else:
            filtered = tmpl_sources
        return filtered

    def get_template(tdir):
        """
        Returns template data for the given template directory.

        Returns None if the template is invalid.

        :param tdir: Template directory to get data from.
        """
        # NOTE: reads `src` from the enclosing for-loop below (the source
        # currently being scanned).
        tmpl = None
        template_json = os.path.join(tdir, 'template.json')
        if not os.path.exists(template_json):
            # no template.json -> invalid template
            # NOTE(review): Logger.warn is a deprecated alias of warning.
            _logger().warn('"template.json" not found in template directory: %r', tdir)
        else:
            try:
                with open(template_json) as f:
                    tmpl = json.loads(f.read())
            except (OSError, json.JSONDecodeError):
                # unreadable template.json -> invalid template
                _logger().exception('failed to read %r', template_json)
                tmpl = None
            else:
                try:
                    tmpl_cat = tmpl['category']
                except KeyError:
                    # no metadata or no category in template.json -> invalid template
                    _logger().exception('failed to read category from template metadata, '
                                        'incomplete template.json?')
                    tmpl = None
                else:
                    # valid template (finally).
                    tmpl['source'] = src
                    if category and category != tmpl_cat:
                        _logger().debug('rejecting template directory: %r, invalid category', tdir)
                        tmpl = None
        return tmpl

    def listdir(directory):
        """
        Securely list subdirectories of ``directory``.

        Returns an empty list if an OSError occurred.
        """
        try:
            return os.listdir(directory)
        except OSError:
            return []

    # Scan every (possibly filtered) source directory; each subdirectory
    # holding a valid template.json yields one template dict.
    for src in filtered_sources():
        for tdir in listdir(src['path']):
            tdir = os.path.join(src['path'], tdir)
            if os.path.isfile(tdir):
                continue
            tmpl = get_template(tdir)
            if tmpl:
                tmpl['path'] = tdir
                yield tmpl
def get_template(source, template):
    """
    Look up a single template by source label and template name.

    Returns the template dict, or ``None`` when no match exists.
    """
    matches = (t for t in get_templates(source_filter=source)
               if t['name'] == template)
    return next(matches, None)
def _logger():
    """Return the logger for this module."""
    return logging.getLogger(__name__)
if __name__ == '__main__':
    # Ad-hoc manual test: reset the source registry, register two
    # hard-coded (developer-machine-specific) template sources, then dump
    # every discovered template as pretty-printed JSON.
    clear_sources()
    add_source('COBOL', '/home/colin/Documents/hackedit-cobol/hackedit_cobol/templates')
    add_source('Python', '/home/colin/Documents/hackedit-python/hackedit_python/templates')
    for tmpl in get_templates():
        print(json.dumps(tmpl, indent=4, sort_keys=True))
| 1,348 | 0 | 130 |
1a60970d1a4cf3ecc7aacdd16b38eca549a34840 | 1,845 | py | Python | src/tubize/videotomp4.py | olivervinn/tubizescripts | 8756f322d3e31f76f8b77cb8e084ded5941e29fa | [
"MIT"
] | null | null | null | src/tubize/videotomp4.py | olivervinn/tubizescripts | 8756f322d3e31f76f8b77cb8e084ded5941e29fa | [
"MIT"
] | null | null | null | src/tubize/videotomp4.py | olivervinn/tubizescripts | 8756f322d3e31f76f8b77cb8e084ded5941e29fa | [
"MIT"
] | null | null | null | """
Convert video format x to MP4/H.264.
"""
import os
import sys
import logging
from .videometainfo import VideoMetaInfo
from .utils import sizeof_fmt, time_fmt, find_files, check_dependencies, call, ffmpeg
logger = logging.getLogger(__name__)
class VideoToMP4:
"""To Mp4"""
SUPPORTED_EXTENSIONS = ".wmv, .avi, .mkv, .mov, .flv"
RULES = {
".wmv": "-c:v libx264 -crf 19 ",
".avi":
"-vf yadif=1 -c:v h264_nvenc -preset slow -tune film -crf 17",
".mkv": "-c copy",
".mov": "-vcodec h264 -acodec aac -strict -2 -crf 19 ",
".flv": " -r 20 ",
}
def process(self, video_file: str):
"""Convert video files to MP4 container format."""
name = os.path.splitext(video_file)[0]
ext = os.path.splitext(video_file)[1]
new_name = f"{name}.mp4"
if os.path.exists(new_name):
logger.info(f"Skipping file {new_name} already exists!")
elif ext not in VideoToMP4.RULES:
logger.error(f"Skipping unsupported type {ext}!")
else:
print(f'Convert {ext} to MP4 {new_name} ... ')
meta_info = VideoMetaInfo(video_file)
rule = VideoToMP4.RULES[ext]
flags = "-movflags +faststart -pix_fmt yuv420p"
ffmpeg(
f'-i "{video_file}" {flags} {rule} -metadata date="{meta_info.original_date}" "{new_name}"'
)
| 32.368421 | 107 | 0.571816 | """
Convert video format x to MP4/H.264.
"""
import os
import sys
import logging
from .videometainfo import VideoMetaInfo
from .utils import sizeof_fmt, time_fmt, find_files, check_dependencies, call, ffmpeg
logger = logging.getLogger(__name__)
class VideoToMP4:
    """Convert videos in other container formats to MP4/H.264.

    Per-extension ffmpeg option templates live in ``RULES``; inputs whose
    extension has no rule are skipped with an error log, and existing
    outputs are never overwritten.
    """

    # Informational list of the extensions this converter handles.
    SUPPORTED_EXTENSIONS = ".wmv, .avi, .mkv, .mov, .flv"

    # ffmpeg codec/filter options applied per source extension.
    RULES = {
        ".wmv": "-c:v libx264 -crf 19 ",
        ".avi":
        "-vf yadif=1 -c:v h264_nvenc -preset slow -tune film -crf 17",
        ".mkv": "-c copy",
        ".mov": "-vcodec h264 -acodec aac -strict -2 -crf 19 ",
        ".flv": " -r 20 ",
    }

    def process(self, video_file: str):
        """Convert video files to MP4 container format."""
        name = os.path.splitext(video_file)[0]
        ext = os.path.splitext(video_file)[1]
        new_name = f"{name}.mp4"
        if os.path.exists(new_name):
            # Never clobber an already-converted output.
            logger.info(f"Skipping file {new_name} already exists!")
        elif ext not in VideoToMP4.RULES:
            logger.error(f"Skipping unsupported type {ext}!")
        else:
            print(f'Convert {ext} to MP4 {new_name} ... ')
            meta_info = VideoMetaInfo(video_file)
            rule = VideoToMP4.RULES[ext]
            # Shared flags: enable streaming-friendly layout and a widely
            # compatible pixel format.
            flags = "-movflags +faststart -pix_fmt yuv420p"
            # Original creation date is preserved as MP4 metadata.
            ffmpeg(
                f'-i "{video_file}" {flags} {rule} -metadata date="{meta_info.original_date}" "{new_name}"'
            )

    def file(self, filename: str) -> None:
        """Convert a single file."""
        logger.debug(f"converting file (unknown)")
        self.process(filename)

    def directory(self, path: str, extension: str) -> int:
        """Convert every file under *path* matching *extension*."""
        files = find_files(path, extension)
        if len(files) < 1:
            print("No matching files found in directory!", file=sys.stderr)
        else:
            for f in files:
                self.file(f)
| 351 | 0 | 54 |
8052d0446907259540de210ff2c92410c7342f2e | 117 | py | Python | setup.py | snasiriany/parasol | 88b99704676fb1253b8bc6402665a3174a00072d | [
"MIT"
] | 66 | 2019-01-07T23:59:26.000Z | 2021-12-29T16:51:56.000Z | setup.py | snasiriany/parasol | 88b99704676fb1253b8bc6402665a3174a00072d | [
"MIT"
] | 8 | 2019-01-09T01:35:54.000Z | 2021-08-23T20:05:03.000Z | setup.py | snasiriany/parasol | 88b99704676fb1253b8bc6402665a3174a00072d | [
"MIT"
] | 21 | 2019-03-26T01:02:33.000Z | 2022-01-26T20:34:34.000Z | from setuptools import setup
# Minimal package definition for the "parasol" distribution; dependency
# lists are intentionally left empty here.
setup(
    name='parasol',
    dependency_links=[
    ],
    install_requires=[
    ]
)
| 13 | 28 | 0.623932 | from setuptools import setup
# Minimal package definition for the "parasol" distribution; dependency
# lists are intentionally left empty here.
setup(
    name='parasol',
    dependency_links=[
    ],
    install_requires=[
    ]
)
| 0 | 0 | 0 |
79299c770a188b579e6412af89f2263960e65f50 | 568 | py | Python | app/migrations/0007_auto_20211102_1946.py | Rqwannn/Rudemy | fe2d84540f3cc64c0ff6821e5f2fac22675fd381 | [
"MIT"
] | 3 | 2021-12-27T06:16:26.000Z | 2022-01-20T02:13:03.000Z | app/migrations/0007_auto_20211102_1946.py | Rqwannn/Rudemy | fe2d84540f3cc64c0ff6821e5f2fac22675fd381 | [
"MIT"
] | null | null | null | app/migrations/0007_auto_20211102_1946.py | Rqwannn/Rudemy | fe2d84540f3cc64c0ff6821e5f2fac22675fd381 | [
"MIT"
] | null | null | null | # Generated by Django 3.2.8 on 2021-11-02 12:46
from django.db import migrations, models
| 21.846154 | 67 | 0.549296 | # Generated by Django 3.2.8 on 2021-11-02 12:46
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated schema migration: drops the Profile.skill field and the
    # Skill model, replacing them with a Profile.tags many-to-many to Tag.

    dependencies = [
        ('app', '0006_auto_20211102_1928'),
    ]

    operations = [
        migrations.RemoveField(
            model_name='profile',
            name='skill',
        ),
        migrations.AddField(
            model_name='profile',
            name='tags',
            field=models.ManyToManyField(blank=True, to='app.Tag'),
        ),
        migrations.DeleteModel(
            name='Skill',
        ),
    ]
| 0 | 454 | 23 |
752ee840202809a32e9848a1a2c9a1828e74e71c | 5,132 | py | Python | oasislmf/model_execution/conf.py | ibailey-SCOR/OasisLMF | 966b4de4e1e64851970f4291c5bdfe7edc20cb7a | [
"BSD-3-Clause"
] | null | null | null | oasislmf/model_execution/conf.py | ibailey-SCOR/OasisLMF | 966b4de4e1e64851970f4291c5bdfe7edc20cb7a | [
"BSD-3-Clause"
] | null | null | null | oasislmf/model_execution/conf.py | ibailey-SCOR/OasisLMF | 966b4de4e1e64851970f4291c5bdfe7edc20cb7a | [
"BSD-3-Clause"
] | null | null | null | import csv
import io
import json
import logging
import os
import warnings
from collections import defaultdict
from ..utils.exceptions import OasisException
from ..utils.log import oasis_log
from .files import GENERAL_SETTINGS_FILE, GUL_SUMMARIES_FILE, IL_SUMMARIES_FILE, MODEL_SETTINGS_FILE
def _get_summaries(summary_file):
"""
Get a list representation of a summary file.
"""
summaries_dict = defaultdict(lambda: {'leccalc': {}})
with io.open(summary_file, 'r', encoding='utf-8') as csvfile:
reader = csv.reader(csvfile)
for row in reader:
id = int(row[0])
if row[1].startswith('leccalc'):
summaries_dict[id]['leccalc'][row[1]] = row[2].lower() == 'true'
else:
summaries_dict[id][row[1]] = row[2].lower() == 'true'
summaries = list()
for id in sorted(summaries_dict):
summaries_dict[id]['id'] = id
summaries.append(summaries_dict[id])
return summaries
@oasis_log
def create_analysis_settings_json(directory):
    """
    Generate an analysis settings JSON from a set of
    CSV files in a specified directory.

    Args:
        ``directory`` (string): the directory containing the CSV files.

    Returns:
        The analysis settings JSON.

    Raises:
        OasisException: if the directory or any required CSV file is missing.
    """
    if not os.path.exists(directory):
        error_message = "Directory does not exist: {}".format(directory)
        logging.getLogger().error(error_message)
        raise OasisException(error_message)

    general_settings_file = os.path.join(directory, GENERAL_SETTINGS_FILE)
    model_settings_file = os.path.join(directory, MODEL_SETTINGS_FILE)
    gul_summaries_file = os.path.join(directory, GUL_SUMMARIES_FILE)
    il_summaries_file = os.path.join(directory, IL_SUMMARIES_FILE)

    for file in [general_settings_file, model_settings_file, gul_summaries_file, il_summaries_file]:
        if not os.path.exists(file):
            error_message = "File does not exist: {}".format(directory)
            logging.getLogger().error(error_message)
            raise OasisException(error_message)

    # Rows are (name, value, type): the third column names a Python type
    # that is applied to the raw string value.
    # NOTE(review): this uses eval() on CSV cell contents — the settings
    # files are trusted here, but this must never run on untrusted input.
    general_settings = dict()
    with io.open(general_settings_file, 'r', encoding='utf-8') as csvfile:
        reader = csv.reader(csvfile)
        for row in reader:
            general_settings[row[0]] = eval("{}('{}')".format(row[2], row[1]))

    model_settings = dict()
    with io.open(model_settings_file, 'r', encoding='utf-8') as csvfile:
        reader = csv.reader(csvfile)
        for row in reader:
            model_settings[row[0]] = eval("{}('{}')".format(row[2], row[1]))

    gul_summaries = _get_summaries(gul_summaries_file)
    il_summaries = _get_summaries(il_summaries_file)

    # Assemble the final settings document on top of the general settings.
    analysis_settings = general_settings
    analysis_settings['model_settings'] = model_settings
    analysis_settings['gul_summaries'] = gul_summaries
    analysis_settings['il_summaries'] = il_summaries
    output_json = json.dumps(analysis_settings)
    logging.getLogger().info("Analysis settings json: {}".format(output_json))

    return output_json
def read_analysis_settings(analysis_settings_fp, il_files_exist=False,
                           ri_files_exist=False):
    """Read the analysis settings file"""
    try:
        with io.open(analysis_settings_fp, 'r', encoding='utf-8') as f:
            settings = json.load(f)
        # Unwrap a top-level 'analysis_settings' envelope when present.
        if settings.get('analysis_settings'):
            settings = settings['analysis_settings']
    except (IOError, TypeError, ValueError):
        raise OasisException(
            'Invalid analysis settings file or file path: {}.'.format(
                analysis_settings_fp))

    # Insured-loss output needs the IL files; disable it otherwise.
    if 'il_output' not in settings or not il_files_exist:
        settings['il_output'] = False
        settings['il_summaries'] = []

    # Same treatment for reinsurance output.
    if 'ri_output' not in settings or not ri_files_exist:
        settings['ri_output'] = False
        settings['ri_summaries'] = []

    # RI output implies IL output, which in turn needs the IL files.
    if settings['ri_output'] and not settings['il_output']:
        if il_files_exist:
            settings['il_output'] = True
        else:
            warnings.warn("ri_output selected, but il files not found")
            settings['ri_output'] = False
            settings['ri_summaries'] = []

    # guard - at least one output type must be requested.
    requested = [settings.get(key, False)
                 for key in ('gul_output', 'il_output', 'ri_output')]
    if not any(requested):
        raise OasisException(
            'No valid output settings in: {}'.format(analysis_settings_fp))

    return settings
| 36.657143 | 100 | 0.677319 | import csv
import io
import json
import logging
import os
import warnings
from collections import defaultdict
from ..utils.exceptions import OasisException
from ..utils.log import oasis_log
from .files import GENERAL_SETTINGS_FILE, GUL_SUMMARIES_FILE, IL_SUMMARIES_FILE, MODEL_SETTINGS_FILE
def _get_summaries(summary_file):
"""
Get a list representation of a summary file.
"""
summaries_dict = defaultdict(lambda: {'leccalc': {}})
with io.open(summary_file, 'r', encoding='utf-8') as csvfile:
reader = csv.reader(csvfile)
for row in reader:
id = int(row[0])
if row[1].startswith('leccalc'):
summaries_dict[id]['leccalc'][row[1]] = row[2].lower() == 'true'
else:
summaries_dict[id][row[1]] = row[2].lower() == 'true'
summaries = list()
for id in sorted(summaries_dict):
summaries_dict[id]['id'] = id
summaries.append(summaries_dict[id])
return summaries
@oasis_log
def create_analysis_settings_json(directory):
"""
Generate an analysis settings JSON from a set of
CSV files in a specified directory.
Args:
``directory`` (string): the directory containing the CSV files.
Returns:
The analysis settings JSON.
"""
if not os.path.exists(directory):
error_message = "Directory does not exist: {}".format(directory)
logging.getLogger().error(error_message)
raise OasisException(error_message)
general_settings_file = os.path.join(directory, GENERAL_SETTINGS_FILE)
model_settings_file = os.path.join(directory, MODEL_SETTINGS_FILE)
gul_summaries_file = os.path.join(directory, GUL_SUMMARIES_FILE)
il_summaries_file = os.path.join(directory, IL_SUMMARIES_FILE)
for file in [general_settings_file, model_settings_file, gul_summaries_file, il_summaries_file]:
if not os.path.exists(file):
error_message = "File does not exist: {}".format(directory)
logging.getLogger().error(error_message)
raise OasisException(error_message)
general_settings = dict()
with io.open(general_settings_file, 'r', encoding='utf-8') as csvfile:
reader = csv.reader(csvfile)
for row in reader:
general_settings[row[0]] = eval("{}('{}')".format(row[2], row[1]))
model_settings = dict()
with io.open(model_settings_file, 'r', encoding='utf-8') as csvfile:
reader = csv.reader(csvfile)
for row in reader:
model_settings[row[0]] = eval("{}('{}')".format(row[2], row[1]))
gul_summaries = _get_summaries(gul_summaries_file)
il_summaries = _get_summaries(il_summaries_file)
analysis_settings = general_settings
analysis_settings['model_settings'] = model_settings
analysis_settings['gul_summaries'] = gul_summaries
analysis_settings['il_summaries'] = il_summaries
output_json = json.dumps(analysis_settings)
logging.getLogger().info("Analysis settings json: {}".format(output_json))
return output_json
def read_analysis_settings(analysis_settings_fp, il_files_exist=False,
ri_files_exist=False):
"""Read the analysis settings file"""
# Load analysis_settings file
try:
# Load as a json
with io.open(analysis_settings_fp, 'r', encoding='utf-8') as f:
analysis_settings = json.load(f)
# Extract the analysis_settings part within the json
if analysis_settings.get('analysis_settings'):
analysis_settings = analysis_settings['analysis_settings']
except (IOError, TypeError, ValueError):
raise OasisException('Invalid analysis settings file or file path: {}.'.format(
analysis_settings_fp))
# Reset il_output if the files are not there
if not il_files_exist or 'il_output' not in analysis_settings:
# No insured loss output
analysis_settings['il_output'] = False
analysis_settings['il_summaries'] = []
# Same for ri_output
if not ri_files_exist or 'ri_output' not in analysis_settings:
# No reinsured loss output
analysis_settings['ri_output'] = False
analysis_settings['ri_summaries'] = []
# If we want ri_output, we will need il_output, which needs il_files
if analysis_settings['ri_output'] and not analysis_settings['il_output']:
if not il_files_exist:
warnings.warn("ri_output selected, but il files not found")
analysis_settings['ri_output'] = False
analysis_settings['ri_summaries'] = []
else:
analysis_settings['il_output'] = True
# guard - Check if at least one output type is selected
if not any([
analysis_settings['gul_output'] if 'gul_output' in analysis_settings else False,
analysis_settings['il_output'] if 'il_output' in analysis_settings else False,
analysis_settings['ri_output'] if 'ri_output' in analysis_settings else False,
]):
raise OasisException(
'No valid output settings in: {}'.format(analysis_settings_fp))
return analysis_settings
| 0 | 0 | 0 |
cb8ea6149e57e707c1ee331f670e37c8feb61914 | 6,815 | py | Python | codes/functions.py | Wenupi/protoplanetary_disks | 51f8decbec5415e1da9893316f03d32ca5ab27de | [
"MIT"
] | null | null | null | codes/functions.py | Wenupi/protoplanetary_disks | 51f8decbec5415e1da9893316f03d32ca5ab27de | [
"MIT"
] | null | null | null | codes/functions.py | Wenupi/protoplanetary_disks | 51f8decbec5415e1da9893316f03d32ca5ab27de | [
"MIT"
] | null | null | null | #!/usr/bin/env python
#--------------------------------------------------------------------------------
#Changes the sky coordinates (x,y,z) to the disk coordinates (x_d,y_d,z_d)
#The x axis is the rotation axis
#--------------------------------------------------------------------------------
#Radiative transfer equation
#--------------------------------------------------------------------------------
#Optical depth
#--------------------------------------------------------------------------------
#--------------------------------------------------------------------------------
#Black body radiation
#--------------------------------------------------------------------------------
#--------------------------------------------------------------------------------
#--------------------------------------------------------------------------------
#--------------------------------------------------------------------------------
#Lee las tablas de opacidad DSHARP
#Load opacities
with np.load('default_opacities_smooth.npz') as d:
a_w = d['a']
gsca_w = d['g']
lam_w = d['lam']
k_abs_w = d['k_abs']
k_sca_w = d['k_sca']
lam_avgs = wl
# We split the opacities within the range of frequency to make the calculations faster
k_abs_w = k_abs_w[(0.9*lam_avgs<lam_w) & (1.1*lam_avgs>lam_w),:]
k_sca_w = k_sca_w[(0.9*lam_avgs<lam_w) & (1.1*lam_avgs>lam_w),:]
k_sca_w = k_sca_w*(1. - gsca_w[(0.9*lam_avgs<lam_w) & (1.1*lam_avgs>lam_w),:])
lam_w = lam_w[(0.9*lam_avgs<lam_w) & (1.1*lam_avgs>lam_w)]
opac_grid = opacity.size_average_opacity(lam_avgs, a_w, lam_w, k_abs_w.T, k_sca_w.T, q=3.5, plot=True)
function_ext = interpolate.interp1d(a_w, opac_grid['ka'][:]+opac_grid['ks'][:],kind='cubic')
function_alb = interpolate.interp1d(a_w, opac_grid['ks'][:]/(opac_grid['ka'][:]+opac_grid['ks'][:]),kind='cubic')
if not scattering:
function_alb = interpolate.interp1d(a_w, np.zeros((np.shape(opac_grid['ks'][:]))),kind='cubic')
| 43.685897 | 134 | 0.501981 | #!/usr/bin/env python
#--------------------------------------------------------------------------------
#Changes the sky coordinates (x,y,z) to the disk coordinates (x_d,y_d,z_d)
#The x axis is the rotation axis
def FUN_rotation(x,y,z):
    """Rotate sky coordinates (x, y, z) into disk coordinates.

    Rotation is about the x axis by the module-level global ``inc``
    (presumably the disk inclination in radians — confirm where it is set).
    """
    x_d = x
    y_d = y*np.cos(inc) - z*np.sin(inc)
    z_d = y*np.sin(inc) + z*np.cos(inc)
    return x_d,y_d,z_d
#--------------------------------------------------------------------------------
#Radiative transfer equation
def FUN_intensity(I,z,x,y,optde):
    """dI/dz integrand of the radiative transfer equation along the
    line of sight.

    ``I`` is the solver-supplied accumulated intensity (unused here);
    ``optde`` is a callable returning the optical depth at ``z``.
    Relies on module globals ``EQ_density``, ``EQ_amax``, ``function_ext``
    and the source-function interpolator ``funcion_S`` defined elsewhere.
    """
    x_d,y_d,z_d = FUN_rotation(x,y,z)
    density = EQ_density(x_d,y_d,z_d)
    amax = EQ_amax(x_d,y_d,z_d)
    opa = function_ext(amax)
    S = funcion_S([z_d,y_d,x_d])
    # z is the integration variable (must be evaluable at any point)
    dIdz = -S*opa*density*np.exp(-optde(z))
    return dIdz
#--------------------------------------------------------------------------------
#Optical depth
def FUN_tau(tt,z,x,y):
    """d(tau)/dz integrand along the inclined line of sight.

    ``tt`` is the solver-supplied accumulated tau (unused). Coordinates are
    rotated into the disk frame before evaluating density and opacity.
    """
    x_d,y_d,z_d = FUN_rotation(x,y,z)
    density = EQ_density(x_d,y_d,z_d)
    amax = EQ_amax(x_d,y_d,z_d)
    opa = function_ext(amax)
    dtau = -opa*density
    return dtau
#--------------------------------------------------------------------------------
def FUN_tau_zaxis(tt,z,x,y):
    """d(tau)/dz integrand measured along the disk's own z axis.

    Identical to ``FUN_tau`` except no rotation is applied — inputs are
    already disk coordinates (used for the vertical optical depth).
    """
    x_d,y_d,z_d = x,y,z
    density = EQ_density(x_d,y_d,z_d)
    amax = EQ_amax(x_d,y_d,z_d)
    opa = function_ext(amax)
    dtau = -opa*density
    return dtau
#--------------------------------------------------------------------------------
#Black body radiation
def FUN_BB(nu,T):
    """Photon occupation factor 1 / (exp(h*nu / k*T) - 1).

    NOTE(review): the full Planck prefactor 2*h*nu^3/c^2 is commented out
    below, so this is NOT the Planck function itself — presumably the
    prefactor cancels downstream; confirm before reusing elsewhere.
    """
#    B = 2.*hP*nu**3/clight**2/( np.exp(hP*nu/kB/T) - 1.)
    B = 1./( np.exp(hP*nu/kB/T) - 1.)
    return B
#--------------------------------------------------------------------------------
def FUN_limits_mult(xx,yy):
    """Return the sky-frame z values where the line of sight at (xx, yy)
    enters or leaves the disk (density boundaries).

    Returns an empty list when (xx, yy) falls outside the disk's projected
    extent. Depends on module globals ``Rout``, ``inc``, ``EQ_Height`` and
    ``EQ_density``.
    """
    Hout = EQ_Height(Rout)
    lim_z = Rout*np.sin(inc) + 2.*Hout*np.cos(inc)  # Based on the geometry of the disk
    lim_y = Rout*np.cos(inc) + 2.*Hout*np.sin(inc)  # Based on the geometry of the disk
    z_arr = np.linspace(1.1*lim_z, -1.1*lim_z, 200)
    z_crit = []
    if ((np.abs(xx) <=Rout) and (np.abs(yy) <= lim_y)):
        xd,yd,zd = FUN_rotation(xx,yy,z_arr)
        crit = np.zeros((len(z_arr)))
        ###############################################################################
        # Works, but could be optimized (vectorized)
        ###############################################################################
        for ii in range(len(z_arr)):  # Build a 0/1 density mask along the line of sight
            if (EQ_density(xd,yd[ii],zd[ii]) == 0.):
                crit[ii] = 0
            else:
                crit[ii] = 1
        for ii in range(len(z_arr)):  # Record where the mask flips (the boundaries)
            if ( (ii != 0) and (crit[ii] - crit[ii-1] != 0 )):
                z_crit.append(z_arr[ii])
            elif(ii == 0 and crit[0] == 1):
                z_crit.append(z_arr[0])
        ###############################################################################
    return z_crit
#--------------------------------------------------------------------------------
def FUN_creates_source_function(x_array,y_array):
    """Build 3-D interpolators for the source function and temperature.

    For every disk-frame (x, y) column: integrate the vertical optical
    depth with ``odeint``, combine accretion and irradiation heating into a
    temperature, and evaluate the scattering-corrected source function on a
    (z, y, x) grid. Returns ``(funcion_S, funcion_T)`` as
    RegularGridInterpolator objects over (z, y, x).

    Depends on many module globals (``EQ_Height``, ``EQ_amax``, stellar and
    disk constants, ``nu``, ``function_alb``) defined elsewhere in the file.
    """
    # Arrays and limits
    Hout = EQ_Height(Rout)
    z_array = np.linspace(-2.*Hout, 2.*Hout, 200)
    Sfunction = np.zeros((len(z_array),len(y_array),len(x_array)))
    Temfunction = np.zeros((len(z_array),len(y_array),len(x_array)))
    op_depth_p = np.zeros((len(y_array),len(x_array)))
    # Computes the optical depth (perpendicular to the disk midplane)
    for j in range(len(y_array)):
        for i in range(len(x_array)):
            if(x_array[i] == 0. and y_array[j] == 0.):
                # Singular axis point: no well-defined radius, leave zeroed.
                Sfunction[:,j,i] = 0.
                Temfunction[:,j,i] = 0.
            else:
                rad = np.sqrt(x_array[i]**2 + y_array[j]**2)
                Hscale = EQ_Height(rad)
                # Integrate tau downward from +2H; last element is the
                # column's total vertical optical depth.
                z_integ = np.linspace(2.*Hscale,-2.*Hscale,200)
                sol = odeint(FUN_tau_zaxis,0.,z_integ,args=(x_array[i],y_array[j])).T[0]
                op_depth_p[j][i] = sol[len(z_integ)-1]
                inter_opt = interpolate.interp1d(z_integ,sol,kind='linear', bounds_error=False,fill_value=0.)
                for k in range(len(z_array)):
                    amax = EQ_amax(x_array[i],y_array[j],z_array[k])
                    albedo = function_alb(amax)
                    ########## Temperature ##########
                    # Viscous accretion heating plus stellar irradiation,
                    # combined as T = (Tacc^4 + Tirr^4)^(1/4).
                    Omega2 = Ggrav*Mstar/(rad*AU)**3
                    Teff4 = 3.*Mdot*Omega2/8./np.pi/sigmaB
                    Tacc4 = 3./4.*(7.*inter_opt(abs(z_array[k])) + 2./3.)*Teff4
                    Tirr4 = Tstar**4./4.*(Rstar/rad/AU)**2*np.exp(-7.*inter_opt(abs(z_array[k]))/phi_angle)
                    Temfunction[k,j,i] = (Tacc4 + Tirr4)**(0.25)
                    #Temfunction[k,j,i] = EQ_temperature(x_array[i],y_array[j],z_array[k])
                    ###############################
                    Sfunction[k,j,i] = FUN_BB(nu,Temfunction[k,j,i])*(1.+ albedo*FUN_f(inter_opt(z_array[k]),op_depth_p[j][i],albedo))
    # Build the 3-D source-function and temperature interpolators
    funcion_S = RegularGridInterpolator((z_array, y_array, x_array), Sfunction,bounds_error=False,fill_value=None)
    funcion_T = RegularGridInterpolator((z_array, y_array, x_array), Temfunction,bounds_error=False,fill_value=None)
    return funcion_S, funcion_T
#--------------------------------------------------------------------------------
def FUN_f(t, tau, alb):
    """Two-stream scattering correction factor at optical depth ``t`` inside
    a slab of total optical depth ``tau`` with single-scattering albedo
    ``alb`` (eps = sqrt(1 - alb))."""
    eps = np.sqrt(1. - alb)
    k = np.sqrt(3.) * eps
    numerator = np.exp(-k * t) + np.exp(k * (t - tau))
    denominator = np.exp(-k * tau) * (eps - 1.) - (eps + 1.)
    return numerator / denominator
#--------------------------------------------------------------------------------
# Reads the DSHARP opacity tables
# Load opacities
with np.load('default_opacities_smooth.npz') as d:
    a_w     = d['a']      # grain sizes
    gsca_w  = d['g']      # scattering asymmetry parameter
    lam_w   = d['lam']    # wavelengths
    k_abs_w = d['k_abs']  # absorption opacities
    k_sca_w = d['k_sca']  # scattering opacities
# ``wl`` is a module global set elsewhere — presumably the observing
# wavelength in the same units as lam_w; confirm where it is defined.
lam_avgs = wl
# We split the opacities within the range of frequency to make the calculations faster
k_abs_w = k_abs_w[(0.9*lam_avgs<lam_w) & (1.1*lam_avgs>lam_w),:]
k_sca_w = k_sca_w[(0.9*lam_avgs<lam_w) & (1.1*lam_avgs>lam_w),:]
# Effective scattering opacity (1 - g) correction.
k_sca_w = k_sca_w*(1. - gsca_w[(0.9*lam_avgs<lam_w) & (1.1*lam_avgs>lam_w),:])
lam_w = lam_w[(0.9*lam_avgs<lam_w) & (1.1*lam_avgs>lam_w)]
opac_grid = opacity.size_average_opacity(lam_avgs, a_w, lam_w, k_abs_w.T, k_sca_w.T, q=3.5, plot=True)
# Extinction (abs + sca) and albedo as smooth functions of max grain size.
function_ext = interpolate.interp1d(a_w, opac_grid['ka'][:]+opac_grid['ks'][:],kind='cubic')
function_alb = interpolate.interp1d(a_w, opac_grid['ks'][:]/(opac_grid['ka'][:]+opac_grid['ks'][:]),kind='cubic')
if not scattering:
    # With scattering disabled the albedo is forced to zero everywhere.
    function_alb = interpolate.interp1d(a_w, np.zeros((np.shape(opac_grid['ks'][:]))),kind='cubic')
d281bf9d519356903906b4ce02f43f84e40f8147 | 2,893 | py | Python | F0AM_Tools/TUV_to_mat.py | jdhask/pyMCM | 32b65e1dff2e9626df5d52623fd1ac4af29f8c57 | [
"MIT"
] | 1 | 2021-11-15T19:24:40.000Z | 2021-11-15T19:24:40.000Z | F0AM_Tools/TUV_to_mat.py | jdhask/pyMCM | 32b65e1dff2e9626df5d52623fd1ac4af29f8c57 | [
"MIT"
] | null | null | null | F0AM_Tools/TUV_to_mat.py | jdhask/pyMCM | 32b65e1dff2e9626df5d52623fd1ac4af29f8c57 | [
"MIT"
] | 2 | 2021-11-15T19:23:46.000Z | 2021-11-29T12:42:26.000Z | # -*- coding: utf-8 -*-
"""
Created on Wed Jun 16 18:06:05 2021
@author: jhask
"""
import csv
import pandas as pd
import numpy as np
import re
import scipy.io as sio
import os
# Map MCM names to TUV labels
j_vals_dict= dict({
'O3 -> O2 + O(1D)':'J1',
'O3 -> O2 + O(3P)':'J2',
'H2O2 -> 2 OH':'J3',
'NO2 -> NO + O(3P)':'J4',
'NO3 -> NO + O2':'J5',
'NO3 -> NO2 + O(3P)':'J6',
'HNO2 -> OH + NO':'J7',
'HNO3 -> OH + NO2':'J8',
'CH2O -> H + HCO':'J11',
'CH2O -> H2 + CO':'J12',
'CH3CHO -> CH3 + HCO':'J13',
'C2H5CHO -> C2H5 + HCO':'J14',
'CH2=C(CH3)CHO -> Products':'J18',
'CH3COCH3 -> CH3CO + CH3':'J21',
'CH3COCH2CH3 -> CH3CO + CH2CH3':'J22',
'CH3COCH=CH2 -> Products':'J23',
'CHOCHO -> H2 + 2CO':'J31',
'CHOCHO -> CH2O + CO':'J32',
'CHOCHO -> HCO + HCO':'J33',
'CH3COCHO -> CH3CO + HCO':'J34',
'CH3COCOCH3 -> Products':'J35',
'CH3OOH -> CH3O + OH':'J41',
'CH3ONO2 -> CH3O + NO2':'J51',
'C2H5ONO2 -> C2H5O + NO2':'J52',
'n-C3H7ONO2 -> C3H7O + NO2':'J53',
'CH3CHONO2CH3 -> CH3CHOCH3 + NO2':'J54',
'C(CH3)3(ONO2) -> C(CH3)3(O.) + NO2':'J55',
'CH3COCH2(ONO2) -> CH3COCH2(O.) + NO2':'J56',
'CH2(OH)COCH3 -> CH3CO + CH2(OH)':'Jn10',
'CH2=CHCHO -> Products':'Jn11',
'CH3CO(OONO2) -> CH3CO(OO) + NO2':'Jn14',
'CH3CO(OONO2) -> CH3CO(O) + NO3':'Jn15',
'CH3(OONO2) -> CH3(OO) + NO2':'Jn16',
'CH3(OONO2) -> CH3(OO) + NO2':'Jn17',
'N2O5 -> NO3 + NO2':'Jn19',
'N2O5 -> NO3 + NO + O(3P)':'Jn20',
'HNO4 -> HO2 + NO2':'Jn21'})
# TUV output file: parse photolysis (j-value) columns into a DataFrame and
# save them as a MATLAB .mat file for F0AM.
file= 'C:/Users/jhask/OneDrive/Documents/MATLAB/F0AM/Setups/SOAS_RCIM/foam_6_29_out.txt'
# NOTE(review): ``file`` shadows a builtin name; harmless here but worth renaming.
with open(file, "r",errors="ignore") as f: # read line by line.
    reader = csv.reader(f, delimiter="\t")
    # Initialize vars we fill in reading the file.
    # NOTE(review): ln_num and in_species_list are never used afterwards.
    ln_num = 0; map_cols=dict({})
    in_species_list=False;
    pass_go=False
    for row in reader:
        line = " ".join(row) # read line by line.
        # Reaction headers that appear in this line, if any.
        hdrs= [key for key in list(j_vals_dict.keys()) if key in line]
        if len(hdrs) > 0 :
            # Grab the leading reaction index token and map it to the J name.
            headers= re.search(r"[\d]*[\=\w]", line)
            print(line, hdrs, j_vals_dict[ hdrs[:][0]])
            if headers: map_cols[headers.group()]=j_vals_dict[ hdrs[:][0]]
        if (pass_go is True) and ('------' not in line ):
            # Append the j-values to the dataframe at this point in time.
            splt= [float(item) for item in line.split(" ") if item !='']
            df.loc[len(df)]=np.array(splt)
        if 'time, hrs. sza, deg.' in line:
            # Data section starts after this header; df is created only here,
            # so a data line before the header would raise NameError on df.
            pass_go=True
            df=pd.DataFrame(columns= ['time', 'sza']+ list(map_cols.values()))
to_mat={name: col.values for name, col in df.items()}
filename= os.path.join('C:/Users/jhask/OneDrive/Documents/MATLAB/F0AM/Setups/SOAS_RCIM/'+'F0AM_tuv.mat')
sio.savemat(filename, to_mat)
print(filename)
| 30.452632 | 105 | 0.5458 | # -*- coding: utf-8 -*-
"""
Created on Wed Jun 16 18:06:05 2021
@author: jhask
"""
import csv
import pandas as pd
import numpy as np
import re
import scipy.io as sio
import os
# Map MCM names to TUV labels
j_vals_dict= dict({
'O3 -> O2 + O(1D)':'J1',
'O3 -> O2 + O(3P)':'J2',
'H2O2 -> 2 OH':'J3',
'NO2 -> NO + O(3P)':'J4',
'NO3 -> NO + O2':'J5',
'NO3 -> NO2 + O(3P)':'J6',
'HNO2 -> OH + NO':'J7',
'HNO3 -> OH + NO2':'J8',
'CH2O -> H + HCO':'J11',
'CH2O -> H2 + CO':'J12',
'CH3CHO -> CH3 + HCO':'J13',
'C2H5CHO -> C2H5 + HCO':'J14',
'CH2=C(CH3)CHO -> Products':'J18',
'CH3COCH3 -> CH3CO + CH3':'J21',
'CH3COCH2CH3 -> CH3CO + CH2CH3':'J22',
'CH3COCH=CH2 -> Products':'J23',
'CHOCHO -> H2 + 2CO':'J31',
'CHOCHO -> CH2O + CO':'J32',
'CHOCHO -> HCO + HCO':'J33',
'CH3COCHO -> CH3CO + HCO':'J34',
'CH3COCOCH3 -> Products':'J35',
'CH3OOH -> CH3O + OH':'J41',
'CH3ONO2 -> CH3O + NO2':'J51',
'C2H5ONO2 -> C2H5O + NO2':'J52',
'n-C3H7ONO2 -> C3H7O + NO2':'J53',
'CH3CHONO2CH3 -> CH3CHOCH3 + NO2':'J54',
'C(CH3)3(ONO2) -> C(CH3)3(O.) + NO2':'J55',
'CH3COCH2(ONO2) -> CH3COCH2(O.) + NO2':'J56',
'CH2(OH)COCH3 -> CH3CO + CH2(OH)':'Jn10',
'CH2=CHCHO -> Products':'Jn11',
'CH3CO(OONO2) -> CH3CO(OO) + NO2':'Jn14',
'CH3CO(OONO2) -> CH3CO(O) + NO3':'Jn15',
'CH3(OONO2) -> CH3(OO) + NO2':'Jn16',
'CH3(OONO2) -> CH3(OO) + NO2':'Jn17',
'N2O5 -> NO3 + NO2':'Jn19',
'N2O5 -> NO3 + NO + O(3P)':'Jn20',
'HNO4 -> HO2 + NO2':'Jn21'})
#TUV output file.
file= 'C:/Users/jhask/OneDrive/Documents/MATLAB/F0AM/Setups/SOAS_RCIM/foam_6_29_out.txt'
with open(file, "r",errors="ignore") as f: # read line by line.
reader = csv.reader(f, delimiter="\t")
# Initialize vars we fill in reading the file.
ln_num = 0; map_cols=dict({})
in_species_list=False;
pass_go=False
for row in reader:
line = " ".join(row) # read line by line.
hdrs= [key for key in list(j_vals_dict.keys()) if key in line]
if len(hdrs) > 0 :
headers= re.search(r"[\d]*[\=\w]", line)
print(line, hdrs, j_vals_dict[ hdrs[:][0]])
if headers: map_cols[headers.group()]=j_vals_dict[ hdrs[:][0]]
if (pass_go is True) and ('------' not in line ):
# Append the j-values to the dataframe at this point in time.
splt= [float(item) for item in line.split(" ") if item !='']
df.loc[len(df)]=np.array(splt)
if 'time, hrs. sza, deg.' in line:
pass_go=True
df=pd.DataFrame(columns= ['time', 'sza']+ list(map_cols.values()))
to_mat={name: col.values for name, col in df.items()}
filename= os.path.join('C:/Users/jhask/OneDrive/Documents/MATLAB/F0AM/Setups/SOAS_RCIM/'+'F0AM_tuv.mat')
sio.savemat(filename, to_mat)
print(filename)
| 0 | 0 | 0 |
1d7b25e9a1db4f378a05b7199423917d7b5b9f81 | 1,343 | py | Python | extract_url.py | nickinack/extract_url | d084ca0a791d5c50ab2accaee7cb4d0b981bd132 | [
"MIT"
] | 2 | 2022-02-07T05:51:36.000Z | 2022-02-07T05:52:11.000Z | extract_url.py | nickinack/extract_url | d084ca0a791d5c50ab2accaee7cb4d0b981bd132 | [
"MIT"
] | null | null | null | extract_url.py | nickinack/extract_url | d084ca0a791d5c50ab2accaee7cb4d0b981bd132 | [
"MIT"
] | 1 | 2020-05-18T08:29:22.000Z | 2020-05-18T08:29:22.000Z | '''
Imports
'''
from config import *
from newspaper import Article
import sys as sys
import pandas as pd
import csv
from collections import defaultdict
import re
'''
URL Extract
'''
columns = defaultdict(list)
with open('SecurityIDRBT.csv') as f:
reader = csv.DictReader(f) # read rows into a dictionary format
for row in reader: # read a row as {column1: value1, column2: value2,...}
for (k,v) in row.items(): # go over each column name and value
columns[k].append(v) # append the value into the appropriate list
url_list = [] # based on column name k
for element in range(len(columns['Body'])):
urls = re.findall('https?://(?:[-\w.]|(?:%[\da-fA-F]{2}))+', columns['Body'][element])
for url in urls:
url_list.append(url)
'''
Find Unique URLs and filter with semantic search results
'''
url_unique = []
for element in url_list:
if element not in url_unique:
if element not in common_urls_http:
if element not in common_urls_https:
url_unique.append(element)
'''
Write it in a new CSV
'''
with open('url.csv', 'w',newline='') as myfile:
wr = csv.writer(myfile, quoting=csv.QUOTE_ALL)
for word in url_unique:
wr.writerow([word])
| 29.844444 | 95 | 0.603127 | '''
Imports
'''
from config import *
from newspaper import Article
import sys as sys
import pandas as pd
import csv
from collections import defaultdict
import re
'''
URL Extract
'''
columns = defaultdict(list)
with open('SecurityIDRBT.csv') as f:
reader = csv.DictReader(f) # read rows into a dictionary format
for row in reader: # read a row as {column1: value1, column2: value2,...}
for (k,v) in row.items(): # go over each column name and value
columns[k].append(v) # append the value into the appropriate list
url_list = [] # based on column name k
for element in range(len(columns['Body'])):
urls = re.findall('https?://(?:[-\w.]|(?:%[\da-fA-F]{2}))+', columns['Body'][element])
for url in urls:
url_list.append(url)
'''
Find Unique URLs and filter with semantic search results
'''
url_unique = []
for element in url_list:
if element not in url_unique:
if element not in common_urls_http:
if element not in common_urls_https:
url_unique.append(element)
'''
Write it in a new CSV
'''
with open('url.csv', 'w',newline='') as myfile:
wr = csv.writer(myfile, quoting=csv.QUOTE_ALL)
for word in url_unique:
wr.writerow([word])
| 0 | 0 | 0 |
56b682792eb61ccb189ac68b9d7a874cbd6c0a60 | 3,279 | py | Python | test/python/test_mapper_coupling.py | kifumi/qiskit-terra | 203fca6d694a18824a6b12cbabd3dd2c64dd12ae | [
"Apache-2.0"
] | 1 | 2018-11-01T01:35:43.000Z | 2018-11-01T01:35:43.000Z | test/python/test_mapper_coupling.py | a-amaral/qiskit-terra | e73beba1e68de2617046a7e1e9eeac375b61de81 | [
"Apache-2.0"
] | null | null | null | test/python/test_mapper_coupling.py | a-amaral/qiskit-terra | e73beba1e68de2617046a7e1e9eeac375b61de81 | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
# Copyright 2018, IBM.
#
# This source code is licensed under the Apache License, Version 2.0 found in
# the LICENSE.txt file in the root directory of this source tree.
# pylint: disable=missing-docstring
from qiskit.mapper import _coupling
from .common import QiskitTestCase
| 36.433333 | 88 | 0.633425 | # -*- coding: utf-8 -*-
# Copyright 2018, IBM.
#
# This source code is licensed under the Apache License, Version 2.0 found in
# the LICENSE.txt file in the root directory of this source tree.
# pylint: disable=missing-docstring
from qiskit.mapper import _coupling
from .common import QiskitTestCase
class CouplingTest(QiskitTestCase):
    """Unit tests for qiskit.mapper._coupling (Coupling graph helpers)."""
    def test_coupling_dict2list(self):
        """dict adjacency converts to a sorted edge list."""
        input_dict = {0: [1, 2], 1: [2]}
        result = _coupling.coupling_dict2list(input_dict)
        expected = [[0, 1], [0, 2], [1, 2]]
        self.assertEqual(expected, result)
    def test_coupling_dict2list_empty_dict(self):
        """An empty dict converts to None, not an empty list."""
        self.assertIsNone(_coupling.coupling_dict2list({}))
    def test_coupling_list2dict(self):
        """Edge list converts back to dict adjacency."""
        input_list = [[0, 1], [0, 2], [1, 2]]
        result = _coupling.coupling_list2dict(input_list)
        expected = {0: [1, 2], 1: [2]}
        self.assertEqual(expected, result)
    def test_coupling_list2dict_empty_list(self):
        """An empty list converts to None, not an empty dict."""
        self.assertIsNone(_coupling.coupling_list2dict([]))
    def test_empty_coupling_class(self):
        """A fresh Coupling has no qubits, no edges, and is disconnected."""
        coupling = _coupling.Coupling()
        self.assertEqual(0, coupling.size())
        self.assertEqual([], coupling.get_qubits())
        self.assertEqual([], coupling.get_edges())
        self.assertFalse(coupling.connected())
        self.assertEqual("", str(coupling))
    def test_coupling_str(self):
        """str() lists qubits with indices and all edges."""
        coupling_dict = {0: [1, 2], 1: [2]}
        coupling = _coupling.Coupling(coupling_dict)
        expected = ("qubits: q[0] @ 1, q[1] @ 2, q[2] @ 3\n"
                    "edges: q[0]-q[1], q[0]-q[2], q[1]-q[2]")
        self.assertEqual(expected, str(coupling))
    def test_coupling_compute_distance(self):
        """compute_distance enables pairwise distance queries."""
        coupling_dict = {0: [1, 2], 1: [2]}
        coupling = _coupling.Coupling(coupling_dict)
        self.assertTrue(coupling.connected())
        coupling.compute_distance()
        qubits = coupling.get_qubits()
        result = coupling.distance(qubits[0], qubits[1])
        self.assertEqual(1, result)
    def test_coupling_compute_distance_coupling_error(self):
        """compute_distance on an empty (disconnected) graph raises."""
        coupling = _coupling.Coupling()
        self.assertRaises(_coupling.CouplingError, coupling.compute_distance)
    def test_add_qubit(self):
        """Adding a (name, index) tuple registers one qubit."""
        coupling = _coupling.Coupling()
        self.assertEqual("", str(coupling))
        coupling.add_qubit(('q', 0))
        self.assertEqual("qubits: q[0] @ 1", str(coupling))
    def test_add_qubit_not_tuple(self):
        """A non-tuple qubit argument is rejected."""
        coupling = _coupling.Coupling()
        self.assertRaises(_coupling.CouplingError, coupling.add_qubit, 'q0')
    def test_add_qubit_tuple_incorrect_form(self):
        """A tuple with a non-integer index is rejected."""
        coupling = _coupling.Coupling()
        self.assertRaises(_coupling.CouplingError, coupling.add_qubit,
                          ('q', '0'))
    def test_add_edge(self):
        """add_edge implicitly registers both endpoint qubits."""
        coupling = _coupling.Coupling()
        self.assertEqual("", str(coupling))
        coupling.add_edge(("q", 0), ('q', 1))
        expected = ("qubits: q[0] @ 1, q[1] @ 2\n"
                    "edges: q[0]-q[1]")
        self.assertEqual(expected, str(coupling))
    def test_distance_error(self):
        """Test distance method validation."""
        graph = _coupling.Coupling({0: [1, 2], 1: [2]})
        self.assertRaises(_coupling.CouplingError, graph.distance, ('q0', 0), ('q1', 1))
b9bba5bf54a9320d3bc8a8acd4f1c2d5d0aaa83f | 66,268 | py | Python | discretisedfield/tests/test_field.py | StephenPotato/discretisedfield | de49577b47acadd9372854252688194c348844a3 | [
"BSD-3-Clause"
] | 1 | 2021-03-13T09:43:52.000Z | 2021-03-13T09:43:52.000Z | discretisedfield/tests/test_field.py | StephenPotato/discretisedfield | de49577b47acadd9372854252688194c348844a3 | [
"BSD-3-Clause"
] | null | null | null | discretisedfield/tests/test_field.py | StephenPotato/discretisedfield | de49577b47acadd9372854252688194c348844a3 | [
"BSD-3-Clause"
] | null | null | null | import os
import re
import k3d
import types
import random
import pytest
import numbers
import tempfile
import itertools
import numpy as np
import discretisedfield as df
import matplotlib.pyplot as plt
from .test_mesh import TestMesh
| 33.621512 | 79 | 0.470333 | import os
import re
import k3d
import types
import random
import pytest
import numbers
import tempfile
import itertools
import numpy as np
import discretisedfield as df
import matplotlib.pyplot as plt
from .test_mesh import TestMesh
def check_field(field):
    """Exercise the common discretisedfield.Field API on ``field``.

    Asserts invariants that must hold for any valid Field: basic attribute
    types and shapes, repr format, iteration, sampling, operator identities,
    and the dim-specific (scalar vs vector) calculus/integral interfaces.
    """
    # Basic attributes: mesh, dimension, and the backing ndarray shape.
    assert isinstance(field.mesh, df.Mesh)
    assert isinstance(field.dim, int)
    assert field.dim > 0
    assert isinstance(field.array, np.ndarray)
    assert field.array.shape == (*field.mesh.n, field.dim)
    # Average is scalar for dim=1 fields, tuple for vector fields.
    average = field.average
    assert isinstance(average, (tuple, numbers.Real))
    # repr must match the documented Field(...) pattern.
    rstr = repr(field)
    assert isinstance(rstr, str)
    pattern = (r'^Field\(mesh=Mesh\(region=Region\(p1=\(.+\), '
               r'p2=\(.+\)\), .+\), dim=\d+\)$')
    assert re.search(pattern, rstr)
    # Iteration yields one item per mesh cell.
    assert isinstance(field.__iter__(), types.GeneratorType)
    assert len(list(field)) == len(field.mesh)
    # Line sampling across the region diagonal.
    line = field.line(p1=field.mesh.region.pmin,
                      p2=field.mesh.region.pmax,
                      n=5)
    assert isinstance(line, df.Line)
    assert line.n == 5
    # Plane slicing and projection collapse the z dimension.
    plane = field.plane('z', n=(2, 2))
    assert isinstance(plane, df.Field)
    assert len(plane.mesh) == 4
    assert plane.mesh.n == (2, 2, 1)
    project = field.project('z')
    assert isinstance(project, df.Field)
    assert project.mesh.n[2] == 1
    # Point evaluation at the centre and at a random point.
    assert isinstance(field(field.mesh.region.centre), (tuple, numbers.Real))
    assert isinstance(field(field.mesh.region.random_point()),
                      (tuple, numbers.Real))
    # Algebraic operator identities every field must satisfy.
    assert field == field
    assert not field != field
    assert +field == field
    assert -(-field) == field
    assert field + field == 2*field
    assert field - (-field) == field + field
    assert 1*field == field
    assert -1*field == -field
    if field.dim == 1:
        # Scalar fields: gradient exists, no x/y/z components, and volume
        # integrals are real numbers.
        grad = field.grad
        assert isinstance(grad, df.Field)
        assert grad.dim == 3
        assert all(i not in dir(field) for i in 'xyz')
        assert isinstance((field * df.dx).integral(), numbers.Real)
        assert isinstance((field * df.dy).integral(), numbers.Real)
        assert isinstance((field * df.dz).integral(), numbers.Real)
        assert isinstance((field * df.dV).integral(), numbers.Real)
        assert isinstance((field.plane('z') * df.dS).integral(), tuple)
        assert isinstance((field.plane('z') * abs(df.dS)).integral(),
                          numbers.Real)
    if field.dim == 3:
        # Vector fields: norm, components, div/curl, and tuple-valued
        # volume integrals.
        norm = field.norm
        assert isinstance(norm, df.Field)
        assert norm == abs(field)
        assert norm.dim == 1
        assert isinstance(field.x, df.Field)
        assert field.x.dim == 1
        assert isinstance(field.y, df.Field)
        assert field.y.dim == 1
        assert isinstance(field.z, df.Field)
        assert field.z.dim == 1
        div = field.div
        assert isinstance(div, df.Field)
        assert div.dim == 1
        curl = field.curl
        assert isinstance(curl, df.Field)
        assert curl.dim == 3
        field_plane = field.plane('z')
        assert isinstance((field * df.dx).integral(), tuple)
        assert isinstance((field * df.dy).integral(), tuple)
        assert isinstance((field * df.dz).integral(), tuple)
        assert isinstance((field * df.dV).integral(), tuple)
        assert isinstance((field.plane('z') @ df.dS).integral(), numbers.Real)
        assert isinstance((field.plane('z') * abs(df.dS)).integral(), tuple)
        orientation = field.orientation
        assert isinstance(orientation, df.Field)
        assert orientation.dim == 3
        assert all(i in dir(field) for i in 'xyz')
class TestField:
    def setup(self):
        """Build shared fixtures for every test: a list of valid meshes,
        lists of constant/iterable/callable field values, and one
        pre-built vector field (``self.pf``) used by plotting tests."""
        # Get meshes using valid arguments from TestMesh.
        tm = TestMesh()
        tm.setup()
        self.meshes = []
        for p1, p2, n, cell in tm.valid_args:
            region = df.Region(p1=p1, p2=p2)
            mesh = df.Mesh(region=region, n=n, cell=cell)
            self.meshes.append(mesh)
        # Create lists of field values.
        # consts/sfuncs initialise dim=1 fields; iters/vfuncs dim=3 fields.
        self.consts = [0, -5., np.pi, 1e-15, 1.2e12, random.random()]
        self.iters = [(0, 0, 1),
                      (0, -5.1, np.pi),
                      [70, 1e15, 2*np.pi],
                      [5, random.random(), np.pi],
                      np.array([4, -1, 3.7]),
                      np.array([2.1, 0.0, -5*random.random()])]
        self.sfuncs = [lambda c: 1,
                       lambda c: -2.4,
                       lambda c: -6.4e-15,
                       lambda c: c[0] + c[1] + c[2] + 1,
                       lambda c: (c[0]-1)**2 - c[1]+7 + c[2]*0.1,
                       lambda c: np.sin(c[0]) + np.cos(c[1]) - np.sin(2*c[2])]
        self.vfuncs = [lambda c: (1, 2, 0),
                       lambda c: (-2.4, 1e-3, 9),
                       lambda c: (c[0], c[1], c[2] + 100),
                       lambda c: (c[0]+c[2]+10, c[1], c[2]+1),
                       lambda c: (c[0]-1, c[1]+70, c[2]*0.1),
                       lambda c: (np.sin(c[0]), np.cos(c[1]), -np.sin(2*c[2]))]
        # Create a field for plotting tests
        mesh = df.Mesh(p1=(-5e-9, -5e-9, -5e-9),
                       p2=(5e-9, 5e-9, 5e-9),
                       n=(5, 5, 5))
        # Norm is nonzero only inside a cylinder of radius 5 nm around z.
        def norm_fun(point):
            x, y, z = point
            if x**2 + y**2 <= (5e-9)**2:
                return 1e5
            else:
                return 0
        # Two domains pointing in opposite z-directions, split at x = 0.
        def value_fun(point):
            x, y, z = point
            if x <= 0:
                return (0, 0, 1)
            else:
                return (0, 0, -1)
        self.pf = df.Field(mesh, dim=3, value=value_fun, norm=norm_fun)
def test_init_valid_args(self):
for mesh in self.meshes:
for value in self.consts + self.sfuncs:
f = df.Field(mesh, dim=1, value=value)
check_field(f)
for value in self.iters + self.vfuncs:
f = df.Field(mesh, dim=3, value=value)
check_field(f)
def test_init_invalid_args(self):
with pytest.raises(TypeError):
mesh = 'meaningless_mesh_string'
f = df.Field(mesh, dim=1)
for mesh in self.meshes:
for dim in [0, -1, 'dim', (2, 3)]:
with pytest.raises((ValueError, TypeError)):
f = df.Field(mesh, dim=dim)
def test_set_with_ndarray(self):
for mesh in self.meshes:
f = df.Field(mesh, dim=3)
f.value = np.ones((*f.mesh.n, f.dim,))
check_field(f)
assert isinstance(f.value, np.ndarray)
assert f.average == (1, 1, 1)
with pytest.raises(ValueError):
f.value = np.ones((2, 2))
def test_set_with_callable(self):
for mesh in self.meshes:
for func in self.sfuncs:
f = df.Field(mesh, dim=1, value=func)
check_field(f)
rp = f.mesh.region.random_point()
# Make sure to be at the centre of the cell
rp = f.mesh.index2point(f.mesh.point2index(rp))
assert f(rp) == func(rp)
for mesh in self.meshes:
for func in self.vfuncs:
f = df.Field(mesh, dim=3, value=func)
check_field(f)
rp = f.mesh.region.random_point()
rp = f.mesh.index2point(f.mesh.point2index(rp))
assert np.all(f(rp) == func(rp))
def test_set_with_dict(self):
p1 = (0, 0, 0)
p2 = (10e-9, 10e-9, 10e-9)
n = (5, 5, 5)
subregions = {'r1': df.Region(p1=(0, 0, 0), p2=(4e-9, 10e-9, 10e-9)),
'r2': df.Region(p1=(4e-9, 0, 0),
p2=(10e-9, 10e-9, 10e-9))}
mesh = df.Mesh(p1=p1, p2=p2, n=n, subregions=subregions)
field = df.Field(mesh, dim=3, value={'r1': (0, 0, 1),
'r2': (0, 0, 2),
'r1:r2': (0, 0, 5)})
assert np.all(field((3e-9, 7e-9, 9e-9)) == (0, 0, 1))
assert np.all(field((8e-9, 2e-9, 9e-9)) == (0, 0, 2))
def test_set_exception(self):
for mesh in self.meshes:
with pytest.raises(ValueError):
f = df.Field(mesh, dim=3, value='meaningless_string')
with pytest.raises(ValueError):
f = df.Field(mesh, dim=3, value=5+5j)
def test_value(self):
p1 = (0, 0, 0)
p2 = (10e-9, 10e-9, 10e-9)
n = (5, 5, 5)
mesh = df.Mesh(p1=p1, p2=p2, n=n)
f = df.Field(mesh, dim=3)
f.value = (1, 1, 1)
assert f.value == (1, 1, 1)
f.array[0, 0, 0, 0] = 3
assert isinstance(f.value, np.ndarray)
    def test_norm(self):
        """Reading ``norm`` returns the pointwise magnitude; assigning to
        ``norm`` rescales the field, and normalising negative scalar
        fields raises ValueError."""
        mesh = df.Mesh(p1=(0, 0, 0), p2=(10, 10, 10), cell=(5, 5, 5))
        f = df.Field(mesh, dim=3, value=(2, 2, 2))
        assert np.all(f.norm.value == 2*np.sqrt(3))
        assert np.all(f.norm.array == 2*np.sqrt(3))
        assert np.all(f.array == 2)
        # Setting the norm rescales each vector to the requested magnitude.
        f.norm = 1
        assert np.all(f.norm.value == 1)
        assert np.all(f.norm.array == 1)
        assert np.all(f.array == 1/np.sqrt(3))
        # Direct array modification makes the norm non-uniform.
        f.array[0, 0, 0, 0] = 3
        assert isinstance(f.norm.value, np.ndarray)
        assert not np.all(f.norm.value == 1)
        for mesh in self.meshes:
            for value in self.iters + self.vfuncs:
                for norm_value in [1, 2.1, 50, 1e-3, np.pi]:
                    f = df.Field(mesh, dim=3, value=value, norm=norm_value)
                    # Compute norm.
                    norm = f.array[..., 0]**2
                    norm += f.array[..., 1]**2
                    norm += f.array[..., 2]**2
                    norm = np.sqrt(norm)
                    assert norm.shape == f.mesh.n
                    assert f.norm.array.shape == (*f.mesh.n, 1)
                    assert np.all(abs(norm - norm_value) < 1e-12)
        # Exception
        mesh = df.Mesh(p1=(0, 0, 0), p2=(10, 10, 10), cell=(1, 1, 1))
        f = df.Field(mesh, dim=1, value=-5)
        with pytest.raises(ValueError):
            f.norm = 5
def test_norm_is_not_preserved(self):
p1 = (0, 0, 0)
p2 = (10e-9, 10e-9, 10e-9)
n = (5, 5, 5)
mesh = df.Mesh(p1=p1, p2=p2, n=n)
f = df.Field(mesh, dim=3)
f.value = (0, 3, 0)
f.norm = 1
assert np.all(f.norm.array == 1)
f.value = (0, 2, 0)
assert np.all(f.norm.value != 1)
assert np.all(f.norm.array == 2)
def test_norm_zero_field_exception(self):
p1 = (0, 0, 0)
p2 = (10e-9, 10e-9, 10e-9)
n = (5, 5, 5)
mesh = df.Mesh(p1=p1, p2=p2, n=n)
f = df.Field(mesh, dim=3, value=(0, 0, 0))
with pytest.raises(ValueError):
f.norm = 1
def test_zero(self):
p1 = (0, 0, 0)
p2 = (10e-9, 10e-9, 10e-9)
n = (5, 5, 5)
mesh = df.Mesh(p1=p1, p2=p2, n=n)
f = df.Field(mesh, dim=1, value=1e-6)
zf = f.zero
assert f.mesh == zf.mesh
assert f.dim == zf.dim
assert not np.any(zf.array)
f = df.Field(mesh, dim=3, value=(5, -7, 1e3))
zf = f.zero
assert f.mesh == zf.mesh
assert f.dim == zf.dim
assert not np.any(zf.array)
    def test_orientation(self):
        """``orientation`` returns unit vectors, with (0, 0, 0) in
        zero-norm cells; scalar fields have no orientation."""
        p1 = (-5e-9, -5e-9, -5e-9)
        p2 = (5e-9, 5e-9, 5e-9)
        cell = (1e-9, 1e-9, 1e-9)
        mesh = df.Mesh(p1=p1, p2=p2, cell=cell)
        # No zero-norm cells
        f = df.Field(mesh, dim=3, value=(2, 0, 0))
        assert f.orientation.average == (1, 0, 0)
        # With zero-norm cells
        def value_fun(point):
            x, y, z = point
            if x <= 0:
                return (0, 0, 0)
            else:
                return (3, 0, 4)
        f = df.Field(mesh, dim=3, value=value_fun)
        assert f.orientation((-1.5e-9, 3e-9, 0)) == (0, 0, 0)
        # (3, 0, 4) has norm 5 -> orientation (0.6, 0, 0.8).
        assert f.orientation((1.5e-9, 3e-9, 0)) == (0.6, 0, 0.8)
        f = df.Field(mesh, dim=1, value=0)
        with pytest.raises(ValueError):
            of = f.orientation
def test_average(self):
value = -1e-3 + np.pi
tol = 1e-12
p1 = (-5e-9, -5e-9, -5e-9)
p2 = (5e-9, 5e-9, 5e-9)
cell = (1e-9, 1e-9, 1e-9)
mesh = df.Mesh(p1=p1, p2=p2, cell=cell)
f = df.Field(mesh, dim=1, value=2)
assert abs(f.average - 2) < tol
f = df.Field(mesh, dim=3, value=(0, 1, 2))
assert np.allclose(f.average, (0, 1, 2))
def test_field_component(self):
for mesh in self.meshes:
f = df.Field(mesh, dim=3, value=(1, 2, 3))
assert all(isinstance(getattr(f, i), df.Field) for i in 'xyz')
assert all(getattr(f, i).dim == 1 for i in 'xyz')
f = df.Field(mesh, dim=2, value=(1, 2))
assert all(isinstance(getattr(f, i), df.Field) for i in 'xy')
assert all(getattr(f, i).dim == 1 for i in 'xy')
# Exception.
f = df.Field(mesh, dim=1, value=1)
with pytest.raises(AttributeError):
fx = f.x.dim
def test_get_attribute_exception(self):
for mesh in self.meshes:
f = df.Field(mesh, dim=3)
with pytest.raises(AttributeError) as excinfo:
f.__getattr__('nonexisting_attribute')
assert 'has no attribute' in str(excinfo.value)
def test_dir(self):
for mesh in self.meshes:
f = df.Field(mesh, dim=3, value=(5, 6, -9))
assert all(attr in dir(f) for attr in ['x', 'y', 'z', 'div'])
assert 'grad' not in dir(f)
f = df.Field(mesh, dim=1, value=1)
assert all(attr not in dir(f) for attr in ['x', 'y', 'z', 'div'])
assert 'grad' in dir(f)
def test_eq(self):
p1 = (-5e-9, -5e-9, -5e-9)
p2 = (15e-9, 5e-9, 5e-9)
cell = (5e-9, 1e-9, 2.5e-9)
mesh = df.Mesh(p1=p1, p2=p2, cell=cell)
f1 = df.Field(mesh, dim=1, value=0.2)
f2 = df.Field(mesh, dim=1, value=0.2)
f3 = df.Field(mesh, dim=1, value=3.1)
f4 = df.Field(mesh, dim=3, value=(1, -6, 0))
f5 = df.Field(mesh, dim=3, value=(1, -6, 0))
assert f1 == f2
assert not f1 != f2
assert not f1 == f3
assert f1 != f3
assert not f2 == f4
assert f2 != f4
assert f4 == f5
assert not f4 != f5
assert not f1 == 0.2
assert f1 != 0.2
    def test_allclose(self):
        """``allclose`` tolerates tiny numerical differences, rejects
        larger ones and mismatched dimensions, and requires a Field."""
        p1 = (-5e-9, -5e-9, -5e-9)
        p2 = (15e-9, 5e-9, 5e-9)
        cell = (5e-9, 1e-9, 2.5e-9)
        mesh = df.Mesh(p1=p1, p2=p2, cell=cell)
        f1 = df.Field(mesh, dim=1, value=0.2)
        f2 = df.Field(mesh, dim=1, value=0.2+1e-9)
        f3 = df.Field(mesh, dim=1, value=0.21)
        f4 = df.Field(mesh, dim=3, value=(1, -6, 0))
        f5 = df.Field(mesh, dim=3, value=(1, -6+1e-8, 0))
        f6 = df.Field(mesh, dim=3, value=(1, -6.01, 0))
        assert f1.allclose(f2)
        assert not f1.allclose(f3)
        # f1 (dim=1) vs f5 (dim=3): different dimensions are never close.
        assert not f1.allclose(f5)
        assert f4.allclose(f5)
        assert not f4.allclose(f6)
        # A non-field argument raises TypeError.
        with pytest.raises(TypeError):
            f1.allclose(2)
def test_point_neg(self):
p1 = (-5e-9, -5e-9, -5e-9)
p2 = (5e-9, 5e-9, 5e-9)
cell = (1e-9, 1e-9, 1e-9)
mesh = df.Mesh(p1=p1, p2=p2, cell=cell)
# Scalar field
f = df.Field(mesh, dim=1, value=3)
res = -f
check_field(res)
assert res.average == -3
assert f == +f
assert f == -(-f)
assert f == +(-(-f))
# Vector field
f = df.Field(mesh, dim=3, value=(1, 2, -3))
res = -f
check_field(res)
assert res.average == (-1, -2, 3)
assert f == +f
assert f == -(-f)
assert f == +(-(-f))
    def test_pow(self):
        """``**`` works for scalar fields with real exponents only;
        vector bases and non-real exponents raise."""
        p1 = (0, 0, 0)
        p2 = (15e-9, 6e-9, 6e-9)
        cell = (3e-9, 3e-9, 3e-9)
        mesh = df.Mesh(p1=p1, p2=p2, cell=cell)
        # Scalar field
        f = df.Field(mesh, dim=1, value=2)
        res = f**2
        assert res.average == 4
        res = f**(-1)
        assert res.average == 0.5
        # Attempt vector field
        f = df.Field(mesh, dim=3, value=(1, 2, -2))
        with pytest.raises(ValueError):
            res = f**2
        # Attempt to raise to non numbers.Real
        f = df.Field(mesh, dim=1, value=2)
        with pytest.raises(TypeError):
            res = f**'a'
        with pytest.raises(TypeError):
            res = f**f
    def test_add_subtract(self):
        """``+``/``-`` (and in-place forms) for fields with fields,
        fields with constants, algebraic identities, and error cases
        (mixed dims, non-numeric operands, different meshes)."""
        p1 = (0, 0, 0)
        p2 = (5e-9, 10e-9, -5e-9)
        n = (2, 2, 1)
        mesh = df.Mesh(p1=p1, p2=p2, n=n)
        # Scalar fields
        f1 = df.Field(mesh, dim=1, value=1.2)
        f2 = df.Field(mesh, dim=1, value=-0.2)
        res = f1 + f2
        assert res.average == 1
        res = f1 - f2
        assert res.average == 1.4
        f1 += f2
        assert f1.average == 1
        f1 -= f2
        assert f1.average == 1.2
        # Vector fields
        f1 = df.Field(mesh, dim=3, value=(1, 2, 3))
        f2 = df.Field(mesh, dim=3, value=(-1, -3, -5))
        res = f1 + f2
        assert res.average == (0, -1, -2)
        res = f1 - f2
        assert res.average == (2, 5, 8)
        f1 += f2
        assert f1.average == (0, -1, -2)
        f1 -= f2
        assert f1.average == (1, 2, 3)
        # Artithmetic checks
        assert f1 + f2 + (1, 1, 1) == (1, 1, 1) + f2 + f1
        assert f1 - f2 - (0, 0, 0) == (0, 0, 0) - (f2 - f1)
        assert f1 + (f1 + f2) == (f1 + f1) + f2
        assert f1 - (f1 + f2) == f1 - f1 - f2
        assert f1 + f2 - f1 == f2 + (0, 0, 0)
        # Constants
        f1 = df.Field(mesh, dim=1, value=1.2)
        f2 = df.Field(mesh, dim=3, value=(-1, -3, -5))
        res = f1 + 2
        assert res.average == 3.2
        res = f1 - 1.2
        assert res.average == 0
        f1 += 2.5
        assert f1.average == 3.7
        f1 -= 3.7
        assert f1.average == 0
        res = f2 + (1, 3, 5)
        assert res.average == (0, 0, 0)
        res = f2 - (1, 2, 3)
        assert res.average == (-2, -5, -8)
        f2 += (1, 1, 1)
        assert f2.average == (0, -2, -4)
        f2 -= (-1, -2, 3)
        assert f2.average == (1, 0, -7)
        # Exceptions
        with pytest.raises(TypeError):
            res = f1 + '2'
        # Fields with different dimensions
        with pytest.raises(ValueError):
            res = f1 + f2
        # Fields defined on different meshes
        mesh1 = df.Mesh(p1=(0, 0, 0), p2=(5, 5, 5), n=(1, 1, 1))
        mesh2 = df.Mesh(p1=(0, 0, 0), p2=(3, 3, 3), n=(1, 1, 1))
        f1 = df.Field(mesh1, dim=1, value=1.2)
        f2 = df.Field(mesh2, dim=1, value=1)
        with pytest.raises(ValueError):
            res = f1 + f2
        with pytest.raises(ValueError):
            f1 += f2
        with pytest.raises(ValueError):
            f1 -= f2
    def test_mul_truediv(self):
        """``*``/``/`` (and in-place forms) for all combinations of
        scalar fields, vector fields and constants, plus identities and
        error cases (vector*vector, non-numeric operands, mixed meshes)."""
        p1 = (0, 0, 0)
        p2 = (5e-9, 5e-9, 5e-9)
        cell = (1e-9, 5e-9, 1e-9)
        mesh = df.Mesh(p1=p1, p2=p2, cell=cell)
        # Scalar fields
        f1 = df.Field(mesh, dim=1, value=1.2)
        f2 = df.Field(mesh, dim=1, value=-2)
        res = f1 * f2
        assert res.average == -2.4
        res = f1 / f2
        assert res.average == -0.6
        f1 *= f2
        assert f1.average == -2.4
        f1 /= f2
        assert f1.average == 1.2
        # Scalar field with a constant
        f = df.Field(mesh, dim=1, value=5)
        res = f * 2
        assert res.average == 10
        res = 3 * f
        assert res.average == 15
        res = f * (1, 2, 3)
        assert res.average == (5, 10, 15)
        res = (1, 2, 3) * f
        assert res.average == (5, 10, 15)
        res = f / 2
        assert res.average == 2.5
        res = 10 / f
        assert res.average == 2
        res = (5, 10, 15) / f
        assert res.average == (1, 2, 3)
        f *= 10
        assert f.average == 50
        f /= 10
        assert f.average == 5
        # Scalar field with a vector field
        f1 = df.Field(mesh, dim=1, value=2)
        f2 = df.Field(mesh, dim=3, value=(-1, -3, 5))
        res = f1 * f2  # __mul__
        assert res.average == (-2, -6, 10)
        res = f2 * f1  # __rmul__
        assert res.average == (-2, -6, 10)
        res = f2 / f1  # __truediv__
        assert res.average == (-0.5, -1.5, 2.5)
        f2 *= f1  # __imul__
        assert f2.average == (-2, -6, 10)
        f2 /= f1  # __truediv__
        assert f2.average == (-1, -3, 5)
        # Dividing by a vector field is undefined.
        with pytest.raises(ValueError):
            res = f1 / f2  # __rtruediv__
        # Vector field with a scalar
        f = df.Field(mesh, dim=3, value=(1, 2, 0))
        res = f * 2
        assert res.average == (2, 4, 0)
        res = 5 * f
        assert res.average == (5, 10, 0)
        res = f / 2
        assert res.average == (0.5, 1, 0)
        f *= 2
        assert f.average == (2, 4, 0)
        f /= 2
        assert f.average == (1, 2, 0)
        with pytest.raises(ValueError):
            res = 10 / f
        # Further checks
        f1 = df.Field(mesh, dim=1, value=2)
        f2 = df.Field(mesh, dim=3, value=(-1, -3, -5))
        assert f1 * f2 == f2 * f1
        assert 1.3 * f2 == f2 * 1.3
        assert -5 * f2 == f2 * (-5)
        assert (1, 2.2, -1) * f1 == f1 * (1, 2.2, -1)
        assert f1 * (f1 * f2) == (f1 * f1) * f2
        assert f1 * f2 / f1 == f2
        # Exceptions
        f1 = df.Field(mesh, dim=1, value=1.2)
        f2 = df.Field(mesh, dim=3, value=(-1, -3, -5))
        with pytest.raises(TypeError):
            res = f2 * 'a'
        with pytest.raises(TypeError):
            res = 'a' / f1
        with pytest.raises(ValueError):
            res = f2 * f2
        with pytest.raises(ValueError):
            res = f2 / f2
        with pytest.raises(ValueError):
            res = 1 / f2
        with pytest.raises(ValueError):
            res = f1 / f2
        with pytest.raises(TypeError):
            f2 *= 'a'
        with pytest.raises(TypeError):
            f2 /= 'a'
        with pytest.raises(ValueError):
            f1 /= f2
        # Fields defined on different meshes
        mesh1 = df.Mesh(p1=(0, 0, 0), p2=(5, 5, 5), n=(1, 1, 1))
        mesh2 = df.Mesh(p1=(0, 0, 0), p2=(3, 3, 3), n=(1, 1, 1))
        f1 = df.Field(mesh1, dim=1, value=1.2)
        f2 = df.Field(mesh2, dim=1, value=1)
        with pytest.raises(ValueError):
            res = f1 * f2
        with pytest.raises(ValueError):
            res = f1 / f2
        with pytest.raises(ValueError):
            f1 *= f2
        with pytest.raises(ValueError):
            f1 /= f2
def test_dot(self):
p1 = (0, 0, 0)
p2 = (10, 10, 10)
cell = (2, 2, 2)
mesh = df.Mesh(p1=p1, p2=p2, cell=cell)
# Zero vectors
f1 = df.Field(mesh, dim=3, value=(0, 0, 0))
res = f1@f1
assert res.dim == 1
assert res.average == 0
# Orthogonal vectors
f1 = df.Field(mesh, dim=3, value=(1, 0, 0))
f2 = df.Field(mesh, dim=3, value=(0, 1, 0))
f3 = df.Field(mesh, dim=3, value=(0, 0, 1))
assert (f1 @ f2).average == 0
assert (f1 @ f3).average == 0
assert (f2 @ f3).average == 0
assert (f1 @ f1).average == 1
assert (f2 @ f2).average == 1
assert (f3 @ f3).average == 1
# Check if commutative
assert f1 @ f2 == f2 @ f1
assert f1 @ (-1, 3, 2.2) == (-1, 3, 2.2) @ f1
# Vector field with a constant
f = df.Field(mesh, dim=3, value=(1, 2, 3))
res = (1, 1, 1) @ f
assert res.average == 6
res = f @ [1, 1, 1]
assert res.average == 6
# Spatially varying vectors
def value_fun1(point):
x, y, z = point
return (x, y, z)
def value_fun2(point):
x, y, z = point
return (z, x, y)
f1 = df.Field(mesh, dim=3, value=value_fun1)
f2 = df.Field(mesh, dim=3, value=value_fun2)
# Check if commutative
assert f1 @ f2 == f2 @ f1
# The dot product should be x*z + y*x + z*y
assert (f1 @ f2)((1, 1, 1)) == 3
assert (f1 @ f2)((3, 1, 1)) == 7
assert (f1 @ f2)((5, 7, 1)) == 47
# Check norm computed using dot product
assert f1.norm == (f1 @ f1)**(0.5)
# Exceptions
f1 = df.Field(mesh, dim=1, value=1.2)
f2 = df.Field(mesh, dim=3, value=(-1, -3, -5))
with pytest.raises(ValueError):
res = f1 @ f2
with pytest.raises(ValueError):
res = f1 @ f2
with pytest.raises(TypeError):
res = f1 @ 3
# Fields defined on different meshes
mesh1 = df.Mesh(p1=(0, 0, 0), p2=(5, 5, 5), n=(1, 1, 1))
mesh2 = df.Mesh(p1=(0, 0, 0), p2=(3, 3, 3), n=(1, 1, 1))
f1 = df.Field(mesh1, dim=3, value=(1, 2, 3))
f2 = df.Field(mesh2, dim=3, value=(3, 2, 1))
with pytest.raises(ValueError):
res = f1 @ f2
    def test_cross(self):
        """Cross product (``&``) of vector fields: zero and orthogonal
        vectors, anticommutativity, constants, spatially varying values,
        and error conditions."""
        p1 = (0, 0, 0)
        p2 = (10, 10, 10)
        cell = (2, 2, 2)
        mesh = df.Mesh(p1=p1, p2=p2, cell=cell)
        # Zero vectors
        f1 = df.Field(mesh, dim=3, value=(0, 0, 0))
        res = f1 & f1
        assert res.dim == 3
        assert res.average == (0, 0, 0)
        # Orthogonal vectors
        f1 = df.Field(mesh, dim=3, value=(1, 0, 0))
        f2 = df.Field(mesh, dim=3, value=(0, 1, 0))
        f3 = df.Field(mesh, dim=3, value=(0, 0, 1))
        assert (f1 & f2).average == (0, 0, 1)
        assert (f1 & f3).average == (0, -1, 0)
        assert (f2 & f3).average == (1, 0, 0)
        assert (f1 & f1).average == (0, 0, 0)
        assert (f2 & f2).average == (0, 0, 0)
        assert (f3 & f3).average == (0, 0, 0)
        # Constants
        assert (f1 & (0, 1, 0)).average == (0, 0, 1)
        assert ((0, 1, 0) & f1).average == (0, 0, 1)
        # Check if not comutative
        assert f1 & f2 == -(f2 & f1)
        assert f1 & f3 == -(f3 & f1)
        assert f2 & f3 == -(f3 & f2)
        f1 = df.Field(mesh, dim=3, value=lambda point: (point[0],
                                                        point[1],
                                                        point[2]))
        f2 = df.Field(mesh, dim=3, value=lambda point: (point[2],
                                                        point[0],
                                                        point[1]))
        # The cross product should be
        # (y**2-x*z, z**2-x*y, x**2-y*z)
        assert (f1 & f2)((1, 1, 1)) == (0, 0, 0)
        assert (f1 & f2)((3, 1, 1)) == (-2, -2, 8)
        assert (f2 & f1)((3, 1, 1)) == (2, 2, -8)
        assert (f1 & f2)((5, 7, 1)) == (44, -34, 18)
        # Exceptions
        f1 = df.Field(mesh, dim=1, value=1.2)
        f2 = df.Field(mesh, dim=3, value=(-1, -3, -5))
        with pytest.raises(TypeError):
            res = f1 & 2
        with pytest.raises(ValueError):
            res = f1 & f2
        # Fields defined on different meshes
        mesh1 = df.Mesh(p1=(0, 0, 0), p2=(5, 5, 5), n=(1, 1, 1))
        mesh2 = df.Mesh(p1=(0, 0, 0), p2=(3, 3, 3), n=(1, 1, 1))
        f1 = df.Field(mesh1, dim=3, value=(1, 2, 3))
        f2 = df.Field(mesh2, dim=3, value=(3, 2, 1))
        with pytest.raises(ValueError):
            res = f1 & f2
    def test_lshift(self):
        """``<<`` concatenates field components (fields and constants)
        into a higher-dimensional field; invalid operands raise."""
        p1 = (0, 0, 0)
        p2 = (10e6, 10e6, 10e6)
        cell = (5e6, 5e6, 5e6)
        mesh = df.Mesh(p1=p1, p2=p2, cell=cell)
        f1 = df.Field(mesh, dim=1, value=1)
        f2 = df.Field(mesh, dim=1, value=-3)
        f3 = df.Field(mesh, dim=1, value=5)
        res = f1 << f2 << f3
        assert res.dim == 3
        assert res.average == (1, -3, 5)
        # Different dimensions
        f1 = df.Field(mesh, dim=1, value=1.2)
        f2 = df.Field(mesh, dim=2, value=(-1, -3))
        res = f1 << f2
        assert res.average == (1.2, -1, -3)
        res = f2 << f1
        assert res.average == (-1, -3, 1.2)
        # Constants
        f1 = df.Field(mesh, dim=1, value=1.2)
        res = f1 << 2
        assert res.average == (1.2, 2)
        res = f1 << (1, -1)
        assert res.average == (1.2, 1, -1)
        res = 3 << f1
        assert res.average == (3, 1.2)
        res = (1.2, 3) << f1 << 3
        assert res.average == (1.2, 3, 1.2, 3)
        # Exceptions
        with pytest.raises(TypeError):
            res = 'a' << f1
        with pytest.raises(TypeError):
            res = f1 << 'a'
        # Fields defined on different meshes
        mesh1 = df.Mesh(p1=(0, 0, 0), p2=(5, 5, 5), n=(1, 1, 1))
        mesh2 = df.Mesh(p1=(0, 0, 0), p2=(3, 3, 3), n=(1, 1, 1))
        f1 = df.Field(mesh1, dim=1, value=1.2)
        f2 = df.Field(mesh2, dim=1, value=1)
        with pytest.raises(ValueError):
            res = f1 << f2
    def test_all_operators(self):
        """Combine many operators in single expressions and check the
        results against hand-computed values."""
        p1 = (0, 0, 0)
        p2 = (5e-9, 5e-9, 10e-9)
        n = (2, 2, 1)
        mesh = df.Mesh(p1=p1, p2=p2, n=n)
        f1 = df.Field(mesh, dim=1, value=2)
        f2 = df.Field(mesh, dim=3, value=(-4, 0, 1))
        # ((2/2 + (-4))**2 - 2*2*3)/(-1) - 2*0 + 1/1 + 17 = -3 + 1 + 0 + 6 = 21
        res = ((+f1/2 + f2.x)**2 - 2*f1*3)/(-f2.z) - 2*f2.y + 1/f2.z**2 + f2@f2
        assert np.all(res.array == 21)
        res = 1 + f1 + 0*f2.x - 3*f2.y/3
        assert res.average == 3
def test_pad(self):
p1 = (0, 0, 0)
p2 = (10, 8, 2)
cell = (1, 1, 1)
mesh = df.Mesh(p1=p1, p2=p2, cell=cell)
field = df.Field(mesh, dim=1, value=1)
pf = field.pad({'x': (1, 1)}, mode='constant') # zeros padded
assert pf.array.shape == (12, 8, 2, 1)
    def test_derivative(self):
        """First and second spatial derivatives of scalar and vector
        fields with analytically known results (no boundary conditions);
        derivative order n > 2 is not implemented."""
        p1 = (0, 0, 0)
        p2 = (10, 10, 10)
        cell = (2, 2, 2)
        # f(x, y, z) = 0 -> grad(f) = (0, 0, 0)
        # No BC
        mesh = df.Mesh(p1=p1, p2=p2, cell=cell)
        f = df.Field(mesh, dim=1, value=0)
        check_field(f.derivative('x'))
        assert f.derivative('x', n=1).average == 0
        assert f.derivative('y', n=1).average == 0
        assert f.derivative('z', n=1).average == 0
        assert f.derivative('x', n=2).average == 0
        assert f.derivative('y', n=2).average == 0
        assert f.derivative('z', n=2).average == 0
        # f(x, y, z) = x + y + z -> grad(f) = (1, 1, 1)
        # No BC
        mesh = df.Mesh(p1=p1, p2=p2, cell=cell)
        def value_fun(point):
            x, y, z = point
            return x + y + z
        f = df.Field(mesh, dim=1, value=value_fun)
        assert f.derivative('x', n=1).average == 1
        assert f.derivative('y', n=1).average == 1
        assert f.derivative('z', n=1).average == 1
        assert f.derivative('x', n=2).average == 0
        assert f.derivative('y', n=2).average == 0
        assert f.derivative('z', n=2).average == 0
        # f(x, y, z) = x*y + 2*y + x*y*z ->
        # grad(f) = (y+y*z, x+2+x*z, x*y)
        # No BC
        mesh = df.Mesh(p1=p1, p2=p2, cell=cell)
        def value_fun(point):
            x, y, z = point
            return x*y + 2*y + x*y*z
        f = df.Field(mesh, dim=1, value=value_fun)
        assert f.derivative('x')((7, 5, 1)) == 10
        assert f.derivative('y')((7, 5, 1)) == 16
        assert f.derivative('z')((7, 5, 1)) == 35
        assert f.derivative('x', n=2)((1, 1, 1)) == 0
        assert f.derivative('y', n=2)((1, 1, 1)) == 0
        assert f.derivative('z', n=2)((1, 1, 1)) == 0
        # f(x, y, z) = (0, 0, 0)
        # -> dfdx = (0, 0, 0)
        # -> dfdy = (0, 0, 0)
        # -> dfdz = (0, 0, 0)
        # No BC
        mesh = df.Mesh(p1=p1, p2=p2, cell=cell)
        f = df.Field(mesh, dim=3, value=(0, 0, 0))
        check_field(f.derivative('y'))
        assert f.derivative('x').average == (0, 0, 0)
        assert f.derivative('y').average == (0, 0, 0)
        assert f.derivative('z').average == (0, 0, 0)
        # f(x, y, z) = (x, y, z)
        # -> dfdx = (1, 0, 0)
        # -> dfdy = (0, 1, 0)
        # -> dfdz = (0, 0, 1)
        def value_fun(point):
            x, y, z = point
            return (x, y, z)
        f = df.Field(mesh, dim=3, value=value_fun)
        assert f.derivative('x').average == (1, 0, 0)
        assert f.derivative('y').average == (0, 1, 0)
        assert f.derivative('z').average == (0, 0, 1)
        # f(x, y, z) = (x*y, y*z, x*y*z)
        # -> dfdx = (y, 0, y*z)
        # -> dfdy = (x, z, x*z)
        # -> dfdz = (0, y, x*y)
        def value_fun(point):
            x, y, z = point
            return (x*y, y*z, x*y*z)
        f = df.Field(mesh, dim=3, value=value_fun)
        assert f.derivative('x')((3, 1, 3)) == (1, 0, 3)
        assert f.derivative('y')((3, 1, 3)) == (3, 3, 9)
        assert f.derivative('z')((3, 1, 3)) == (0, 1, 3)
        assert f.derivative('x')((5, 3, 5)) == (3, 0, 15)
        assert f.derivative('y')((5, 3, 5)) == (5, 5, 25)
        assert f.derivative('z')((5, 3, 5)) == (0, 3, 15)
        # f(x, y, z) = (3+x*y, x-2*y, x*y*z)
        # -> dfdx = (y, 1, y*z)
        # -> dfdy = (x, -2, x*z)
        # -> dfdz = (0, 0, x*y)
        def value_fun(point):
            x, y, z = point
            return (3+x*y, x-2*y, x*y*z)
        f = df.Field(mesh, dim=3, value=value_fun)
        assert f.derivative('x')((7, 5, 1)) == (5, 1, 5)
        assert f.derivative('y')((7, 5, 1)) == (7, -2, 7)
        assert f.derivative('z')((7, 5, 1)) == (0, 0, 35)
        # f(x, y, z) = 2*x*x + 2*y*y + 3*z*z
        # -> grad(f) = (4, 4, 6)
        def value_fun(point):
            x, y, z = point
            return 2*x*x + 2*y*y + 3*z*z
        f = df.Field(mesh, dim=1, value=value_fun)
        assert f.derivative('x', n=2).average == 4
        assert f.derivative('y', n=2).average == 4
        assert f.derivative('z', n=2).average == 6
        # f(x, y, z) = (2*x*x, 2*y*y, 3*z*z)
        def value_fun(point):
            x, y, z = point
            return (2*x*x, 2*y*y, 3*z*z)
        f = df.Field(mesh, dim=3, value=value_fun)
        assert f.derivative('x', n=2).average == (4, 0, 0)
        assert f.derivative('y', n=2).average == (0, 4, 0)
        assert f.derivative('z', n=2).average == (0, 0, 6)
        # Only first and second derivatives are supported.
        with pytest.raises(NotImplementedError):
            res = f.derivative('x', n=3)
    def test_derivative_pbc(self):
        """Periodic boundary conditions change the derivative at the
        mesh edges relative to the no-BC case."""
        p1 = (0, 0, 0)
        p2 = (10, 8, 6)
        cell = (2, 2, 2)
        mesh_nopbc = df.Mesh(p1=p1, p2=p2, cell=cell)
        mesh_pbc = df.Mesh(p1=p1, p2=p2, cell=cell, bc='xyz')
        # Scalar field
        def value_fun(point):
            return point[0]*point[1]*point[2]
        # No PBC
        f = df.Field(mesh_nopbc, dim=1, value=value_fun)
        assert f.derivative('x')((9, 1, 1)) == 1
        assert f.derivative('y')((1, 7, 1)) == 1
        assert f.derivative('z')((1, 1, 5)) == 1
        # PBC: the stencil wraps around, producing different edge values.
        f = df.Field(mesh_pbc, dim=1, value=value_fun)
        assert f.derivative('x')((9, 1, 1)) == -1.5
        assert f.derivative('y')((1, 7, 1)) == -1
        assert f.derivative('z')((1, 1, 5)) == -0.5
        # Vector field
        def value_fun(point):
            return (point[0]*point[1]*point[2],) * 3
        # No PBC
        f = df.Field(mesh_nopbc, dim=3, value=value_fun)
        assert f.derivative('x')((9, 1, 1)) == (1, 1, 1)
        assert f.derivative('y')((1, 7, 1)) == (1, 1, 1)
        assert f.derivative('z')((1, 1, 5)) == (1, 1, 1)
        # PBC
        f = df.Field(mesh_pbc, dim=3, value=value_fun)
        assert f.derivative('x')((9, 1, 1)) == (-1.5, -1.5, -1.5)
        assert f.derivative('y')((1, 7, 1)) == (-1, -1, -1)
        assert f.derivative('z')((1, 1, 5)) == (-0.5, -0.5, -0.5)
    def test_derivative_neumann(self):
        """Neumann boundary conditions leave interior derivatives
        unchanged but alter them at the boundary."""
        p1 = (0, 0, 0)
        p2 = (10, 8, 6)
        cell = (2, 2, 2)
        mesh_noneumann = df.Mesh(p1=p1, p2=p2, cell=cell)
        mesh_neumann = df.Mesh(p1=p1, p2=p2, cell=cell, bc='neumann')
        # Scalar field
        def value_fun(point):
            return point[0]*point[1]*point[2]
        # No Neumann
        f1 = df.Field(mesh_noneumann, dim=1, value=value_fun)
        assert f1.derivative('x')((9, 1, 1)) == 1
        assert f1.derivative('y')((1, 7, 1)) == 1
        assert f1.derivative('z')((1, 1, 5)) == 1
        # Neumann: same value at the centre, different at the boundary.
        f2 = df.Field(mesh_neumann, dim=1, value=value_fun)
        assert (f1.derivative('x')(f1.mesh.region.centre) ==
                f2.derivative('x')(f2.mesh.region.centre))
        assert (f1.derivative('x')((1, 7, 1)) !=
                f2.derivative('x')((1, 7, 1)))
    def test_derivative_single_cell(self):
        """A derivative along a direction with a single cell is zero."""
        p1 = (0, 0, 0)
        p2 = (10, 10, 2)
        cell = (2, 2, 2)
        mesh = df.Mesh(p1=p1, p2=p2, cell=cell)
        # Scalar field: f(x, y, z) = x + y + z
        # -> grad(f) = (1, 1, 1)
        def value_fun(point):
            x, y, z = point
            return x + y + z
        f = df.Field(mesh, dim=1, value=value_fun)
        # only one cell in the z-direction
        assert f.plane('x').derivative('x').average == 0
        assert f.plane('y').derivative('y').average == 0
        assert f.derivative('z').average == 0
        # Vector field: f(x, y, z) = (x, y, z)
        # -> grad(f) = (1, 1, 1)
        def value_fun(point):
            x, y, z = point
            return (x, y, z)
        f = df.Field(mesh, dim=3, value=value_fun)
        # only one cell in the z-direction
        assert f.plane('x').derivative('x').average == (0, 0, 0)
        assert f.plane('y').derivative('y').average == (0, 0, 0)
        assert f.derivative('z').average == (0, 0, 0)
    def test_grad(self):
        """``grad`` of scalar fields against analytic results; its
        components equal the individual derivatives, and vector fields
        have no gradient."""
        p1 = (0, 0, 0)
        p2 = (10, 10, 10)
        cell = (2, 2, 2)
        mesh = df.Mesh(p1=p1, p2=p2, cell=cell)
        # f(x, y, z) = 0 -> grad(f) = (0, 0, 0)
        f = df.Field(mesh, dim=1, value=0)
        check_field(f.grad)
        assert f.grad.average == (0, 0, 0)
        # f(x, y, z) = x + y + z -> grad(f) = (1, 1, 1)
        def value_fun(point):
            x, y, z = point
            return x + y + z
        f = df.Field(mesh, dim=1, value=value_fun)
        assert f.grad.average == (1, 1, 1)
        # f(x, y, z) = x*y + y + z -> grad(f) = (y, x+1, 1)
        def value_fun(point):
            x, y, z = point
            return x*y + y + z
        f = df.Field(mesh, dim=1, value=value_fun)
        assert f.grad((3, 1, 3)) == (1, 4, 1)
        assert f.grad((5, 3, 5)) == (3, 6, 1)
        # f(x, y, z) = x*y + 2*y + x*y*z ->
        # grad(f) = (y+y*z, x+2+x*z, x*y)
        def value_fun(point):
            x, y, z = point
            return x*y + 2*y + x*y*z
        f = df.Field(mesh, dim=1, value=value_fun)
        assert f.grad((7, 5, 1)) == (10, 16, 35)
        # Components of grad match the individual derivatives.
        assert f.grad.x == f.derivative('x')
        assert f.grad.y == f.derivative('y')
        assert f.grad.z == f.derivative('z')
        # Exception
        f = df.Field(mesh, dim=3, value=(1, 2, 3))
        with pytest.raises(ValueError):
            res = f.grad
    def test_div_curl(self):
        """``div`` and ``curl`` of vector fields against analytic
        results; scalar fields support neither."""
        p1 = (0, 0, 0)
        p2 = (10, 10, 10)
        cell = (2, 2, 2)
        mesh = df.Mesh(p1=p1, p2=p2, cell=cell)
        # f(x, y, z) = (0, 0, 0)
        # -> div(f) = 0
        # -> curl(f) = (0, 0, 0)
        f = df.Field(mesh, dim=3, value=(0, 0, 0))
        check_field(f.div)
        assert f.div.dim == 1
        assert f.div.average == 0
        check_field(f.curl)
        assert f.curl.dim == 3
        assert f.curl.average == (0, 0, 0)
        # f(x, y, z) = (x, y, z)
        # -> div(f) = 3
        # -> curl(f) = (0, 0, 0)
        def value_fun(point):
            x, y, z = point
            return (x, y, z)
        f = df.Field(mesh, dim=3, value=value_fun)
        assert f.div.average == 3
        assert f.curl.average == (0, 0, 0)
        # f(x, y, z) = (x*y, y*z, x*y*z)
        # -> div(f) = y + z + x*y
        # -> curl(f) = (x*z-y, -y*z, -x)
        def value_fun(point):
            x, y, z = point
            return (x*y, y*z, x*y*z)
        f = df.Field(mesh, dim=3, value=value_fun)
        assert f.div((3, 1, 3)) == 7
        assert f.div((5, 3, 5)) == 23
        assert f.curl((3, 1, 3)) == (8, -3, -3)
        assert f.curl((5, 3, 5)) == (22, -15, -5)
        # f(x, y, z) = (3+x*y, x-2*y, x*y*z)
        # -> div(f) = y - 2 + x*y
        # -> curl(f) = (x*z, -y*z, 1-x)
        def value_fun(point):
            x, y, z = point
            return (3+x*y, x-2*y, x*y*z)
        f = df.Field(mesh, dim=3, value=value_fun)
        assert f.div((7, 5, 1)) == 38
        assert f.curl((7, 5, 1)) == (7, -5, -6)
        # Exception
        f = df.Field(mesh, dim=1, value=3.11)
        with pytest.raises(ValueError):
            res = f.div
        with pytest.raises(ValueError):
            res = f.curl
    def test_laplace(self):
        """``laplace`` of scalar and vector fields against analytic
        results."""
        p1 = (0, 0, 0)
        p2 = (10, 10, 10)
        cell = (2, 2, 2)
        mesh = df.Mesh(p1=p1, p2=p2, cell=cell)
        # f(x, y, z) = (0, 0, 0)
        # -> laplace(f) = 0
        f = df.Field(mesh, dim=3, value=(0, 0, 0))
        check_field(f.laplace)
        assert f.laplace.dim == 3
        assert f.laplace.average == (0, 0, 0)
        # f(x, y, z) = x + y + z
        # -> laplace(f) = 0
        def value_fun(point):
            x, y, z = point
            return x + y + z
        f = df.Field(mesh, dim=1, value=value_fun)
        check_field(f.laplace)
        assert f.laplace.average == 0
        # f(x, y, z) = 2*x*x + 2*y*y + 3*z*z
        # -> laplace(f) = 4 + 4 + 6 = 14
        def value_fun(point):
            x, y, z = point
            return 2*x*x + 2*y*y + 3*z*z
        f = df.Field(mesh, dim=1, value=value_fun)
        assert f.laplace.average == 14
        # f(x, y, z) = (2*x*x, 2*y*y, 3*z*z)
        # -> laplace(f) = (4, 4, 6)
        def value_fun(point):
            x, y, z = point
            return (2*x*x, 2*y*y, 3*z*z)
        f = df.Field(mesh, dim=3, value=value_fun)
        assert f.laplace.average == (4, 4, 6)
    def test_integral(self):
        """Volume, surface, directional, and improper integrals of
        scalar and vector fields against hand-computed values."""
        # Volume integral.
        p1 = (0, 0, 0)
        p2 = (10, 10, 10)
        cell = (1, 1, 1)
        mesh = df.Mesh(p1=p1, p2=p2, cell=cell)
        f = df.Field(mesh, dim=1, value=0)
        assert (f * df.dV).integral() == 0
        assert (f * df.dx*df.dy*df.dz).integral() == 0
        f = df.Field(mesh, dim=1, value=2)
        assert (f * df.dV).integral() == 2000
        assert (f * df.dx*df.dy*df.dz).integral() == 2000
        f = df.Field(mesh, dim=3, value=(-1, 0, 3))
        assert (f * df.dV).integral() == (-1000, 0, 3000)
        assert (f * df.dx*df.dy*df.dz).integral() == (-1000, 0, 3000)
        # Antisymmetric halves cancel to a zero integral.
        def value_fun(point):
            x, y, z = point
            if x <= 5:
                return (-1, -2, -3)
            else:
                return (1, 2, 3)
        f = df.Field(mesh, dim=3, value=value_fun)
        assert (f * df.dV).integral() == (0, 0, 0)
        assert (f * df.dx*df.dy*df.dz).integral() == (0, 0, 0)
        # Surface integral.
        p1 = (0, 0, 0)
        p2 = (10, 5, 3)
        cell = (1, 1, 1)
        mesh = df.Mesh(p1=p1, p2=p2, cell=cell)
        f = df.Field(mesh, dim=1, value=0)
        assert (f.plane('x') * abs(df.dS)).integral() == 0
        assert (f.plane('x') * df.dy*df.dz).integral() == 0
        f = df.Field(mesh, dim=1, value=2)
        assert (f.plane('x') * abs(df.dS)).integral() == 30
        assert (f.plane('x') * df.dy*df.dz).integral() == 30
        assert (f.plane('y') * abs(df.dS)).integral() == 60
        assert (f.plane('y') * df.dx*df.dz).integral() == 60
        assert (f.plane('z') * abs(df.dS)).integral() == 100
        assert (f.plane('z') * df.dx*df.dy).integral() == 100
        f = df.Field(mesh, dim=3, value=(-1, 0, 3))
        assert (f.plane('x') * abs(df.dS)).integral() == (-15, 0, 45)
        assert (f.plane('y') * abs(df.dS)).integral() == (-30, 0, 90)
        assert (f.plane('z') * abs(df.dS)).integral() == (-50, 0, 150)
        # Flux: dot with the oriented surface element dS.
        f = df.Field(mesh, dim=3, value=(-1, 0, 3))
        assert df.integral(f.plane('x') @ df.dS) == -15
        assert df.integral(f.plane('y') @ df.dS) == 0
        assert df.integral(f.plane('z') @ df.dS) == 150
        # Directional integral
        p1 = (0, 0, 0)
        p2 = (10, 10, 10)
        cell = (1, 1, 1)
        mesh = df.Mesh(p1=p1, p2=p2, cell=cell)
        f = df.Field(mesh, dim=3, value=(1, 1, 1))
        f = f.integral(direction='x')
        assert isinstance(f, df.Field)
        assert f.dim == 3
        assert f.mesh.n == (1, 10, 10)
        assert f.average == (10, 10, 10)
        f = f.integral(direction='x').integral(direction='y')
        assert isinstance(f, df.Field)
        assert f.dim == 3
        assert f.mesh.n == (1, 1, 10)
        assert f.average == (100, 100, 100)
        f = f.integral('x').integral('y').integral('z')
        assert f.dim == 3
        assert f.mesh.n == (1, 1, 1)
        assert f.average == (1000, 1000, 1000)
        assert (f.integral('x').integral('y').integral('z').average ==
                f.integral())
        # Improper integral
        p1 = (0, 0, 0)
        p2 = (10, 10, 10)
        cell = (1, 1, 1)
        mesh = df.Mesh(p1=p1, p2=p2, cell=cell)
        f = df.Field(mesh, dim=3, value=(1, 1, 1))
        f = f.integral(direction='x', improper=True)
        assert isinstance(f, df.Field)
        assert f.dim == 3
        assert f.mesh.n == (10, 10, 10)
        assert f.average == (5.5, 5.5, 5.5)
        assert f((0, 0, 0)) == (1, 1, 1)
        assert f((10, 10, 10)) == (10, 10, 10)
        # Exceptions
        with pytest.raises(ValueError):
            res = f.integral(direction='xy', improper=True)
def test_line(self):
    """Sampling a field along a line yields a df.Line of the requested size."""
    field = df.Field(
        df.Mesh(p1=(0, 0, 0), p2=(10, 10, 10), n=(10, 10, 10)),
        dim=3,
        value=(1, 2, 3),
    )
    check_field(field)
    sampled = field.line(p1=(0, 0, 0), p2=(5, 5, 5), n=20)
    assert isinstance(sampled, df.Line)
    assert sampled.n == 20
    assert sampled.dim == 3
def test_plane(self):
    """A 3x3 plane cut in each direction yields a Field with 9 (point, value) samples."""
    for mesh in self.meshes:
        for axis in ('x', 'y', 'z'):
            field = df.Field(mesh, dim=1, value=3)
            check_field(field)
            cut = field.plane(axis, n=(3, 3))
            assert isinstance(cut, df.Field)
            points, values = zip(*list(cut))
            assert len(points) == 9
            assert len(values) == 9
def test_getitem(self):
    """Indexing a field with a subregion name or a Region returns the restricted field."""
    p1 = (0, 0, 0)
    p2 = (90, 50, 10)
    cell = (5, 5, 5)
    subregions = {'r1': df.Region(p1=(0, 0, 0), p2=(30, 50, 10)),
                  'r2': df.Region(p1=(30, 0, 0), p2=(90, 50, 10))}
    mesh = df.Mesh(p1=p1, p2=p2, cell=cell, subregions=subregions)

    def value_fun(point):
        # Piecewise-constant value split at x=60.
        x, y, z = point
        if x <= 60:
            return (-1, -2, -3)
        else:
            return (1, 2, 3)

    f = df.Field(mesh, dim=3, value=value_fun)
    check_field(f)
    check_field(f['r1'])
    check_field(f['r2'])
    check_field(f[subregions['r1']])
    check_field(f[subregions['r2']])
    assert f['r1'].average == (-1, -2, -3)
    # 'r2' straddles the x=60 boundary, so the two halves cancel on average.
    assert f['r2'].average == (0, 0, 0)
    assert f[subregions['r1']].average == (-1, -2, -3)
    assert f[subregions['r2']].average == (0, 0, 0)
    assert len(f['r1'].mesh) + len(f['r2'].mesh) == len(f.mesh)
    # Meshes are not aligned
    subregion = df.Region(p1=(1.1, 0, 0), p2=(9.9, 15, 5))
    assert f[subregion].array.shape == (2, 3, 1, 3)
def test_project(self):
    """project() averages the field along one direction, collapsing that axis to 1."""
    p1 = (-5, -5, -5)
    p2 = (5, 5, 5)
    cell = (1, 1, 1)
    mesh = df.Mesh(p1=p1, p2=p2, cell=cell)
    # Constant scalar field
    f = df.Field(mesh, dim=1, value=5)
    check_field(f)
    assert f.project('x').array.shape == (1, 10, 10, 1)
    assert f.project('y').array.shape == (10, 1, 10, 1)
    assert f.project('z').array.shape == (10, 10, 1, 1)
    # Constant vector field
    f = df.Field(mesh, dim=3, value=(1, 2, 3))
    assert f.project('x').array.shape == (1, 10, 10, 3)
    assert f.project('y').array.shape == (10, 1, 10, 3)
    assert f.project('z').array.shape == (10, 10, 1, 3)
    # Spatially varying scalar field

    def value_fun(point):
        x, y, z = point
        if z <= 0:
            return 1
        else:
            return -1

    f = df.Field(mesh, dim=1, value=value_fun)
    sf = f.project('z')
    assert sf.array.shape == (10, 10, 1, 1)
    # The +1 and -1 halves cancel when averaged along z.
    assert sf.average == 0
    # Spatially varying vector field

    def value_fun(point):
        x, y, z = point
        if z <= 0:
            return (3, 2, 1)
        else:
            return (3, 2, -1)

    f = df.Field(mesh, dim=3, value=value_fun)
    sf = f.project('z')
    assert sf.array.shape == (10, 10, 1, 3)
    # Only the z-component varies, so it averages to 0.
    assert sf.average == (3, 2, 0)
def test_angle(self):
    """plane('z').angle returns the in-plane angle; unsliced fields raise ValueError."""
    p1 = (0, 0, 0)
    p2 = (8e-9, 2e-9, 2e-9)
    cell = (2e-9, 2e-9, 2e-9)
    mesh = df.Mesh(region=df.Region(p1=p1, p2=p2), cell=cell)

    def value_fun(point):
        # One direction per 2 nm slab along x, one per quadrant of the xy-plane.
        x, y, z = point
        if x < 2e-9:
            return (1, 1, 1)
        elif 2e-9 <= x < 4e-9:
            return (1, -1, 0)
        elif 4e-9 <= x < 6e-9:
            return (-1, -1, 0)
        elif 6e-9 <= x < 8e-9:
            return (-1, 1, 0)

    f = df.Field(mesh, dim=3, value=value_fun)
    # Expected angles are the quadrant bisectors (pi/4, 7pi/4, 5pi/4, 3pi/4).
    assert abs(f.plane('z').angle((1e-9, 2e-9, 2e-9)) - np.pi/4) < 1e-3
    assert abs(f.plane('z').angle((3e-9, 2e-9, 2e-9)) - 7*np.pi/4) < 1e-3
    assert abs(f.plane('z').angle((5e-9, 2e-9, 2e-9)) - 5*np.pi/4) < 1e-3
    assert abs(f.plane('z').angle((7e-9, 2e-9, 2e-9)) - 3*np.pi/4) < 1e-3
    # Exception
    with pytest.raises(ValueError):
        res = f.angle  # the field is not sliced
def test_write_read_ovf(self):
    """Round-trip fields through OVF (txt/bin4/bin8) and read known OOMMF/mumax files."""
    representations = ['txt', 'bin4', 'bin8']
    filename = 'testfile.ovf'
    p1 = (0, 0, 0)
    p2 = (8e-9, 5e-9, 3e-9)
    cell = (1e-9, 1e-9, 1e-9)
    mesh = df.Mesh(region=df.Region(p1=p1, p2=p2), cell=cell)
    # Write/read
    for dim, value in [(1, lambda point: point[0] + point[1] + point[2]),
                       (3, lambda point: (point[0], point[1], point[2]))]:
        f = df.Field(mesh, dim=dim, value=value)
        for rep in representations:
            with tempfile.TemporaryDirectory() as tmpdir:
                tmpfilename = os.path.join(tmpdir, filename)
                f.write(tmpfilename, representation=rep)
                f_read = df.Field.fromfile(tmpfilename)
                assert f.allclose(f_read)
    # Extend scalar: a scalar field written with extend_scalar=True comes
    # back as a vector field whose x-component holds the original values.
    for rep in representations:
        f = df.Field(mesh, dim=1,
                     value=lambda point: point[0]+point[1]+point[2])
        with tempfile.TemporaryDirectory() as tmpdir:
            tmpfilename = os.path.join(tmpdir, filename)
            f.write(tmpfilename, extend_scalar=True)
            f_read = df.Field.fromfile(tmpfilename)
            assert f.allclose(f_read.x)
    # Read different OOMMF representations
    # (OVF1, OVF2) x (txt, bin4, bin8)
    filenames = ['oommf-ovf2-txt.omf',
                 'oommf-ovf2-bin4.omf',
                 'oommf-ovf2-bin8.omf',
                 'oommf-ovf1-txt.omf',
                 'oommf-ovf1-bin4.omf',
                 'oommf-ovf1-bin8.omf']
    dirname = os.path.join(os.path.dirname(__file__), 'test_sample')
    for filename in filenames:
        omffilename = os.path.join(dirname, filename)
        f_read = df.Field.fromfile(omffilename)
        if 'ovf2' in filename:
            # The magnetisation is in the x-direction in OVF2 files.
            assert abs(f_read.orientation.x.average - 1) < 1e-2
        else:
            # The norm of magnetisation is known.
            assert abs(f_read.norm.average - 1261566.2610100) < 1e-3
    # Read different mumax3 bin4 files (made on linux and windows)
    filenames = ['mumax-bin4-linux.ovf', 'mumax-bin4-windows.ovf']
    dirname = os.path.join(os.path.dirname(__file__), 'test_sample')
    for filename in filenames:
        omffilename = os.path.join(dirname, filename)
        f_read = df.Field.fromfile(omffilename)
        # We know the saved magentisation.
        f_saved = df.Field(f_read.mesh, dim=3, value=(1, 0.1, 0), norm=1)
        assert f_saved.allclose(f_read)
    # Exception (dim=2): writing a 2-vector field to OVF is rejected.
    f = df.Field(mesh, dim=2, value=(1, 2))
    with pytest.raises(TypeError) as excinfo:
        f.write(filename)
def test_write_read_vtk(self):
    """Round-trip scalar and vector fields through the VTK file format."""
    filename = 'testfile.vtk'
    p1 = (0, 0, 0)
    p2 = (1e-9, 2e-9, 1e-9)
    cell = (1e-9, 1e-9, 1e-9)
    mesh = df.Mesh(region=df.Region(p1=p1, p2=p2), cell=cell)
    for dim, value in [(1, -1.2), (3, (1e-3, -5e6, 5e6))]:
        f = df.Field(mesh, dim=dim, value=value)
        with tempfile.TemporaryDirectory() as tmpdir:
            tmpfilename = os.path.join(tmpdir, filename)
            f.write(tmpfilename)
            f_read = df.Field.fromfile(tmpfilename)
            # Both the values and the mesh geometry must survive the round trip.
            assert np.allclose(f.array, f_read.array)
            assert np.allclose(f.mesh.region.pmin, f_read.mesh.region.pmin)
            assert np.allclose(f.mesh.region.pmax, f_read.mesh.region.pmax)
            assert np.allclose(f.mesh.cell, f_read.mesh.cell)
            assert f.mesh.n == f_read.mesh.n
def test_write_read_hdf5(self):
    """Round-trip fields through HDF5 using both .hdf5 and .h5 extensions."""
    filenames = ['testfile.hdf5', 'testfile.h5']
    p1 = (0, 0, 0)
    p2 = (10e-12, 5e-12, 5e-12)
    cell = (1e-12, 1e-12, 1e-12)
    mesh = df.Mesh(region=df.Region(p1=p1, p2=p2), cell=cell)
    for dim, value in [(1, -1.23), (3, (1e-3 + np.pi, -5e6, 6e6))]:
        f = df.Field(mesh, dim=dim, value=value)
        for filename in filenames:
            with tempfile.TemporaryDirectory() as tmpdir:
                tmpfilename = os.path.join(tmpdir, filename)
                f.write(tmpfilename)
                f_read = df.Field.fromfile(tmpfilename)
                # Exact equality (not allclose) is expected after an HDF5 round trip.
                assert f == f_read
def test_read_write_invalid_extension(self):
    """Writing or reading a file with an unsupported extension raises ValueError."""
    filename = 'testfile.jpg'
    p1 = (0, 0, 0)
    p2 = (10e-12, 5e-12, 3e-12)
    cell = (1e-12, 1e-12, 1e-12)
    mesh = df.Mesh(region=df.Region(p1=p1, p2=p2), cell=cell)
    f = df.Field(mesh, dim=1, value=5e-12)
    # The captured ExcInfo objects were never inspected, so they are not
    # bound to a name (removes the unused-variable warnings).
    with pytest.raises(ValueError):
        f.write(filename)
    with pytest.raises(ValueError):
        f = df.Field.fromfile(filename)
def test_mpl_scalar(self):
    """Exercise mpl_scalar with default, explicit-axes, keyword and error variants."""
    # No axes
    self.pf.x.plane('x', n=(3, 4)).mpl_scalar()
    # Axes
    fig = plt.figure()
    ax = fig.add_subplot(111)
    self.pf.x.plane('x', n=(3, 4)).mpl_scalar(ax=ax)
    # All arguments
    self.pf.x.plane('x').mpl_scalar(figsize=(10, 10),
                                    filter_field=self.pf.norm,
                                    colorbar=True,
                                    colorbar_label='something',
                                    multiplier=1e-6, cmap='hsv',
                                    clim=(-1, 1))
    # Lightness field
    filenames = ['skyrmion.omf', 'skyrmion-disk.omf']
    for i in filenames:
        filename = os.path.join(os.path.dirname(__file__),
                                'test_sample', i)
        field = df.Field.fromfile(filename)
        field.plane('z').angle.mpl_scalar(lightness_field=field.z)
        field.plane('z').angle.mpl_scalar(lightness_field=-field.z,
                                          filter_field=field.norm)
        field.plane('z').mpl(scalar_lightness_field=-field.z)
    # Saving plot
    filename = 'testfigure.pdf'
    with tempfile.TemporaryDirectory() as tmpdir:
        tmpfilename = os.path.join(tmpdir, filename)
        self.pf.x.plane('x', n=(3, 4)).mpl_scalar(filename=tmpfilename)
    # Exceptions
    with pytest.raises(ValueError):
        self.pf.x.mpl_scalar()  # not sliced
    with pytest.raises(ValueError):
        self.pf.plane('z').mpl_scalar()  # vector field
    with pytest.raises(ValueError):
        # wrong filter field (dim=3 instead of scalar)
        self.pf.x.plane('z').mpl_scalar(filter_field=self.pf)
    with pytest.raises(ValueError):
        # wrong lightness field (dim=3 instead of scalar)
        self.pf.x.plane('z').mpl_scalar(lightness_field=self.pf)
    plt.close('all')
def test_mpl_vector(self):
    """Exercise mpl_vector with default, explicit-axes, keyword and error variants."""
    # No axes
    self.pf.plane('x', n=(3, 4)).mpl_vector()
    # Axes
    fig = plt.figure()
    ax = fig.add_subplot(111)
    self.pf.plane('x', n=(3, 4)).mpl_vector(ax=ax)
    # All arguments
    self.pf.plane('x').mpl_vector(figsize=(10, 10),
                                  color_field=self.pf.y,
                                  colorbar=True,
                                  colorbar_label='something',
                                  multiplier=1e-6, cmap='hsv',
                                  clim=(-1, 1))
    # Saving plot
    filename = 'testfigure.pdf'
    with tempfile.TemporaryDirectory() as tmpdir:
        tmpfilename = os.path.join(tmpdir, filename)
        self.pf.plane('x', n=(3, 4)).mpl_vector(filename=tmpfilename)
    # Exceptions; the captured ExcInfo objects were never inspected, so
    # they are not bound to a name (removes the unused-variable warnings).
    with pytest.raises(ValueError):
        self.pf.mpl_vector()  # not sliced
    with pytest.raises(ValueError):
        self.pf.y.plane('z').mpl_vector()  # scalar field
    with pytest.raises(ValueError):
        # wrong color field
        self.pf.plane('z').mpl_vector(color_field=self.pf)
    plt.close('all')
def test_mpl(self):
    """Exercise the combined mpl convenience plot for vector and scalar fields."""
    # No axes
    self.pf.plane('x', n=(3, 4)).mpl()
    # Axes
    fig = plt.figure()
    ax = fig.add_subplot(111)
    self.pf.x.plane('x', n=(3, 4)).mpl(ax=ax)
    # All arguments for a vector field
    self.pf.plane('x').mpl(figsize=(12, 6),
                           scalar_field=self.pf.plane('x').angle,
                           scalar_filter_field=self.pf.norm,
                           scalar_colorbar_label='something',
                           scalar_cmap='twilight',
                           vector_field=self.pf,
                           vector_color_field=self.pf.y,
                           vector_color=True,
                           vector_colorbar=True,
                           vector_colorbar_label='vector',
                           vector_cmap='hsv', vector_clim=(0, 1e6),
                           multiplier=1e-12)
    # All arguments for a scalar field
    self.pf.z.plane('x').mpl(figsize=(12, 6),
                             scalar_field=self.pf.x,
                             scalar_filter_field=self.pf.norm,
                             scalar_colorbar_label='something',
                             scalar_cmap='twilight',
                             vector_field=self.pf,
                             vector_color_field=self.pf.y,
                             vector_color=True,
                             vector_colorbar=True,
                             vector_colorbar_label='vector',
                             vector_cmap='hsv', vector_clim=(0, 1e6),
                             multiplier=1e-12)
    # Saving plot
    filename = 'testfigure.pdf'
    with tempfile.TemporaryDirectory() as tmpdir:
        tmpfilename = os.path.join(tmpdir, filename)
        self.pf.plane('x', n=(3, 4)).mpl(filename=tmpfilename)
    # Exception: plotting an unsliced field is rejected.
    with pytest.raises(ValueError):
        self.pf.mpl()
    plt.close('all')
def test_k3d_nonzero(self):
    """Exercise k3d_nonzero variants, including interactive-plot continuation."""
    # Default
    self.pf.norm.k3d_nonzero()
    # Color
    self.pf.x.k3d_nonzero(color=0xff00ff)
    # Multiplier
    self.pf.x.k3d_nonzero(color=0xff00ff, multiplier=1e-6)
    # Interactive field
    self.pf.x.plane('z').k3d_nonzero(color=0xff00ff,
                                     multiplier=1e-6,
                                     interactive_field=self.pf)
    # kwargs
    self.pf.x.plane('z').k3d_nonzero(color=0xff00ff,
                                     multiplier=1e-6,
                                     interactive_field=self.pf,
                                     wireframe=True)
    # Plot
    plot = k3d.plot()
    plot.display()
    self.pf.x.plane(z=0).k3d_nonzero(plot=plot,
                                     color=0xff00ff,
                                     multiplier=1e-6,
                                     interactive_field=self.pf)
    # Continuation for interactive plot testing.
    self.pf.x.plane(z=1e-9).k3d_nonzero(plot=plot,
                                        color=0xff00ff,
                                        multiplier=1e-6,
                                        interactive_field=self.pf)
    # Both planes were added to the same k3d plot.
    assert len(plot.objects) == 2
    with pytest.raises(ValueError) as excinfo:
        self.pf.k3d_nonzero()
def test_k3d_scalar(self):
    """Exercise k3d_scalar variants, including interactive-plot continuation."""
    # Default
    self.pf.y.k3d_scalar()
    # Filter field
    self.pf.y.k3d_scalar(filter_field=self.pf.norm)
    # Colormap
    self.pf.x.k3d_scalar(filter_field=self.pf.norm,
                         cmap='hsv',
                         color=0xff00ff)
    # Multiplier
    self.pf.y.k3d_scalar(filter_field=self.pf.norm,
                         color=0xff00ff,
                         multiplier=1e-6)
    # Interactive field
    self.pf.y.k3d_scalar(filter_field=self.pf.norm,
                         color=0xff00ff,
                         multiplier=1e-6,
                         interactive_field=self.pf)
    # kwargs
    self.pf.y.k3d_scalar(filter_field=self.pf.norm,
                         color=0xff00ff,
                         multiplier=1e-6,
                         interactive_field=self.pf,
                         wireframe=True)
    # Plot
    plot = k3d.plot()
    plot.display()
    self.pf.y.plane(z=0).k3d_scalar(plot=plot,
                                    filter_field=self.pf.norm,
                                    color=0xff00ff,
                                    multiplier=1e-6,
                                    interactive_field=self.pf)
    # Continuation for interactive plot testing.
    self.pf.y.plane(z=1e-9).k3d_scalar(plot=plot,
                                       filter_field=self.pf.norm,
                                       color=0xff00ff,
                                       multiplier=1e-6,
                                       interactive_field=self.pf)
    # Both planes were added to the same k3d plot.
    assert len(plot.objects) == 2
    # Exceptions
    with pytest.raises(ValueError) as excinfo:
        self.pf.k3d_scalar()
    with pytest.raises(ValueError):
        self.pf.x.k3d_scalar(filter_field=self.pf)  # filter field dim=3
def test_k3d_vector(self):
    """Exercise k3d_vector variants, including interactive-plot continuation."""
    # Default
    self.pf.k3d_vector()
    # Color field
    self.pf.k3d_vector(color_field=self.pf.x)
    # Colormap
    self.pf.k3d_vector(color_field=self.pf.norm,
                       cmap='hsv')
    # Head size
    self.pf.k3d_vector(color_field=self.pf.norm,
                       cmap='hsv',
                       head_size=3)
    # Points
    self.pf.k3d_vector(color_field=self.pf.norm,
                       cmap='hsv',
                       head_size=3,
                       points=False)
    # Point size
    self.pf.k3d_vector(color_field=self.pf.norm,
                       cmap='hsv',
                       head_size=3,
                       points=False,
                       point_size=1)
    # Vector multiplier
    self.pf.k3d_vector(color_field=self.pf.norm,
                       cmap='hsv',
                       head_size=3,
                       points=False,
                       point_size=1,
                       vector_multiplier=1)
    # Multiplier
    self.pf.k3d_vector(color_field=self.pf.norm,
                       cmap='hsv',
                       head_size=3,
                       points=False,
                       point_size=1,
                       vector_multiplier=1,
                       multiplier=1e-6)
    # Interactive field
    self.pf.plane('z').k3d_vector(color_field=self.pf.norm,
                                  cmap='hsv',
                                  head_size=3,
                                  points=False,
                                  point_size=1,
                                  vector_multiplier=1,
                                  multiplier=1e-6,
                                  interactive_field=self.pf)
    # Plot
    plot = k3d.plot()
    plot.display()
    self.pf.plane(z=0).k3d_vector(plot=plot, interactive_field=self.pf)
    # Continuation for interactive plot testing.
    self.pf.plane(z=1e-9).k3d_vector(plot=plot, interactive_field=self.pf)
    assert len(plot.objects) == 3
    # Exceptions
    with pytest.raises(ValueError) as excinfo:
        self.pf.x.k3d_vector()
    with pytest.raises(ValueError):
        self.pf.k3d_vector(color_field=self.pf)  # color field dim=3
def test_plot_large_sample(self):
    """Smoke-test mpl and k3d plotting on a mesh with a very large spatial extent."""
    mesh = df.Mesh(p1=(0, 0, 0),
                   p2=(50e9, 50e9, 50e9),
                   cell=(25e9, 25e9, 25e9))
    field = df.Field(mesh, dim=3, value=(1e6, 1e6, 1e6))
    field.plane('z').mpl()
    field.norm.k3d_nonzero()
    field.x.k3d_scalar()
    field.k3d_vector()
| 64,589 | -5 | 1,449 |
d991aedad470b351e70cf5b10b085c74cc95e474 | 462 | py | Python | env/Lib/site-packages/values/__init__.py | KaceyHirth/Library-DBMS-System | 40b425ed5c7b46627b7c48724b2d20e7a64cf025 | [
"MIT"
] | 4 | 2022-02-06T00:54:58.000Z | 2022-02-25T12:44:43.000Z | env/Lib/site-packages/values/__init__.py | KaceyHirth/Library-DBMS-System | 40b425ed5c7b46627b7c48724b2d20e7a64cf025 | [
"MIT"
] | 3 | 2021-03-23T04:58:47.000Z | 2021-04-02T02:40:54.000Z | env/Lib/site-packages/values/__init__.py | KaceyHirth/Library-DBMS-System | 40b425ed5c7b46627b7c48724b2d20e7a64cf025 | [
"MIT"
] | 1 | 2022-02-08T13:43:20.000Z | 2022-02-08T13:43:20.000Z | __all__ = ['get']
import collections
def get(input):
    """Return a list with input values or [] if input is None.

    Strings count as scalars: get("ab") -> ["ab"], not ["a", "b"].
    """
    # Local import keeps this function self-contained.  The helper it used
    # to call relied on collections.Iterable, which was removed in
    # Python 3.10; collections.abc.Iterable is the supported spelling.
    from collections.abc import Iterable
    if input is None:
        return []
    if isinstance(input, str) or not isinstance(input, Iterable):
        return [input]
    return list(input)
| 18.48 | 64 | 0.645022 | __all__ = ['get']
import collections
def _iterable(obj):
return isinstance(obj, collections.Iterable)
def _string(value):
    """Return True if *value* is a string on both Python 2 and Python 3."""
    try:
        # Python 2: basestring covers both str and unicode.
        return isinstance(value, basestring)
    except NameError:
        # Python 3: basestring does not exist, so fall back to str.
        return isinstance(value, str)
def get(input):
    """Normalize *input* to a list: None -> [], scalars and strings -> [input]."""
    if input is None:
        return []
    if _string(input) or not _iterable(input):
        return [input]
    return list(input)
| 159 | 0 | 46 |
d0e19b396bd5c3861e79601ace321dbbd96d9384 | 165 | py | Python | vnpy/app/strategy_reviewer/ui/__init__.py | xyh888/vnpy | 7b51716928ab9574f171a2eda190b37b4f393bb1 | [
"MIT"
] | 5 | 2019-05-24T05:19:55.000Z | 2020-07-29T13:21:49.000Z | vnpy/app/strategy_reviewer/ui/__init__.py | xyh888/vnpy | 7b51716928ab9574f171a2eda190b37b4f393bb1 | [
"MIT"
] | null | null | null | vnpy/app/strategy_reviewer/ui/__init__.py | xyh888/vnpy | 7b51716928ab9574f171a2eda190b37b4f393bb1 | [
"MIT"
] | 2 | 2019-07-01T02:14:04.000Z | 2020-07-29T13:21:53.000Z | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Time : 2019/8/20 0020 16:49
# @Author : Hadrianl
# @File : __init__.py
from .widget import StrategyReviewer | 23.571429 | 36 | 0.630303 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Time : 2019/8/20 0020 16:49
# @Author : Hadrianl
# @File : __init__.py
from .widget import StrategyReviewer | 0 | 0 | 0 |
4a04e22adafbd1373a9d9fc82325fd3d15005b8b | 647 | py | Python | Lesson 13.gf/xml_Leader2.py | gfoo003/programming-together | 225e0a2255dd8da1f1ef32d2a88deea27c050f10 | [
"MIT"
] | null | null | null | Lesson 13.gf/xml_Leader2.py | gfoo003/programming-together | 225e0a2255dd8da1f1ef32d2a88deea27c050f10 | [
"MIT"
] | null | null | null | Lesson 13.gf/xml_Leader2.py | gfoo003/programming-together | 225e0a2255dd8da1f1ef32d2a88deea27c050f10 | [
"MIT"
] | null | null | null | import xml.etree.ElementTree as ET
xml_string = '''
<stuff>
<users>
<user x = "2">
<id>001</id>
<name>Chuck</name>
</user>
<user x = "7">
<id>007</id>
<name>Brent</name>
</user>
</users>
</stuff>
'''
root_stuff = ET.fromstring(xml_string)
#don't usually refer to root element
user_elements = root_stuff.findall('users/user')
print ('user count:', len(user_elements))
for user in user_elements:
print('name:', user.find('name').text)
print('id:', user.find('id').text)
print('attribute(x):', user.get('x'))
#to identify attribute use 'get's
| 23.107143 | 48 | 0.565688 | import xml.etree.ElementTree as ET
# Walk-through: parse an XML string, look up elements by path, read
# element text and attributes.
xml_string = '''
<stuff>
<users>
<user x = "2">
<id>001</id>
<name>Chuck</name>
</user>
<user x = "7">
<id>007</id>
<name>Brent</name>
</user>
</users>
</stuff>
'''
root_stuff = ET.fromstring(xml_string)
# don't usually refer to root element
user_elements = root_stuff.findall('users/user')
print('user count:', len(user_elements))
for user in user_elements:
    print('name:', user.find('name').text)
    print('id:', user.find('id').text)
    print('attribute(x):', user.get('x'))
    # to identify attribute use 'get'
| 0 | 0 | 0 |
bebc974c59298f013c68b5d5e434ba4b2d82a0a8 | 213 | py | Python | 第4章/program/Chapter_4_dummy.py | kingname/SourceCodeOfBook | ab7275108994dca564905818b678bbd2f771c18e | [
"MIT"
] | 274 | 2018-10-01T11:07:25.000Z | 2022-03-17T13:48:45.000Z | 第4章/program/Chapter_4_dummy.py | kingname/SourceCodeOfBook | ab7275108994dca564905818b678bbd2f771c18e | [
"MIT"
] | 6 | 2019-02-28T14:18:21.000Z | 2022-03-02T14:57:39.000Z | 第4章/program/Chapter_4_dummy.py | kingname/SourceCodeOfBook | ab7275108994dca564905818b678bbd2f771c18e | [
"MIT"
] | 110 | 2018-10-16T06:08:37.000Z | 2022-03-16T08:19:29.000Z | from multiprocessing.dummy import Pool
from multiprocessing.dummy import Pool


def calc_power2(num):
    """Return the square of *num* (worker function for the thread pool)."""
    return num * num


# multiprocessing.dummy provides a thread-based Pool with the Pool API.
# calc_power2 was referenced but never defined in this copy of the script,
# which raised NameError at pool.map(); the worker is restored above.
pool = Pool(3)
origin_num = [x for x in range(10)]
result = pool.map(calc_power2, origin_num)
print(f'计算1-10的平方分别为:{result}')
| 16.384615 | 42 | 0.71831 | from multiprocessing.dummy import Pool
def calc_power2(num):
    """Worker: return the square of a single number."""
    return num ** 2


pool = Pool(3)
origin_num = list(range(10))
result = pool.map(calc_power2, origin_num)
print(f'计算1-10的平方分别为:{result}')
| 21 | 0 | 23 |
9c1b437a67fd15632bb77976584935abcfb546e4 | 92 | py | Python | traf_stat/apps.py | bashmak/djing | 8cc0c670600254d288178acd47965f7b3db6856e | [
"Unlicense"
] | 23 | 2017-04-27T20:13:22.000Z | 2022-03-16T12:47:29.000Z | traf_stat/apps.py | bashmak/djing | 8cc0c670600254d288178acd47965f7b3db6856e | [
"Unlicense"
] | 2 | 2017-04-04T15:03:12.000Z | 2021-01-26T15:30:57.000Z | traf_stat/apps.py | bashmak/djing | 8cc0c670600254d288178acd47965f7b3db6856e | [
"Unlicense"
] | 13 | 2017-08-22T16:00:03.000Z | 2022-03-20T03:12:15.000Z | from django.apps import AppConfig
| 15.333333 | 33 | 0.76087 | from django.apps import AppConfig
class TrafStatConfig(AppConfig):
    """Django application configuration for the traffic statistics app."""
    # Dotted module path of the app this configuration belongs to.
    name = 'traf_stat'
| 0 | 34 | 23 |
6996b5b815f2d10dc544bc52eb21ec8c9cd0c496 | 1,424 | py | Python | entries/views.py | acdh-oeaw/vhioe | 83c8bce83d7cb21150f404409477d2cd1c7ee240 | [
"MIT"
] | null | null | null | entries/views.py | acdh-oeaw/vhioe | 83c8bce83d7cb21150f404409477d2cd1c7ee240 | [
"MIT"
] | 10 | 2020-02-11T23:56:16.000Z | 2021-12-13T19:45:38.000Z | entries/views.py | acdh-oeaw/vhioe | 83c8bce83d7cb21150f404409477d2cd1c7ee240 | [
"MIT"
] | null | null | null | from django.core.urlresolvers import reverse
from django.views.generic.detail import DetailView
from django.views.generic.list import ListView
from django.views.generic.edit import CreateView, UpdateView, DeleteView
from django.core.urlresolvers import reverse_lazy
from django.utils.decorators import method_decorator
from django.contrib.auth.decorators import login_required
from .models import Eintrag
from .forms import EintragForm
| 26.867925 | 72 | 0.752809 | from django.core.urlresolvers import reverse
from django.views.generic.detail import DetailView
from django.views.generic.list import ListView
from django.views.generic.edit import CreateView, UpdateView, DeleteView
from django.core.urlresolvers import reverse_lazy
from django.utils.decorators import method_decorator
from django.contrib.auth.decorators import login_required
from .models import Eintrag
from .forms import EintragForm
class EintragDetailView(DetailView):
    """Display a single Eintrag instance."""
    model = Eintrag
class EintragListView(ListView):
    """Display the list of all Eintrag objects."""
    model = Eintrag
class EintragCreate(CreateView):
    """Create a new Eintrag; requires an authenticated user."""
    model = Eintrag
    template_name_suffix = '_create'
    form_class = EintragForm
    # '.' redirects back to the current URL after a successful save.
    success_url = '.'

    @method_decorator(login_required)
    def dispatch(self, *args, **kwargs):
        # Gate every HTTP method behind login, not just POST.
        return super(EintragCreate, self).dispatch(*args, **kwargs)
class EintragUpdate(UpdateView):
    """Edit an existing Eintrag; requires an authenticated user."""
    model = Eintrag
    form_class = EintragForm
    # Reuses the creation template (suffix '_create').
    template_name_suffix = '_create'

    @method_decorator(login_required)
    def dispatch(self, *args, **kwargs):
        return super(EintragUpdate, self).dispatch(*args, **kwargs)
class EintragDelete(DeleteView):
    """Delete an Eintrag after confirmation; requires an authenticated user."""
    model = Eintrag
    template_name = 'vocabs/confirm_delete.html'
    # reverse_lazy defers URL resolution until the URLConf is loaded,
    # which is required for class attributes evaluated at import time.
    success_url = reverse_lazy('browsing:browse_entries')

    @method_decorator(login_required)
    def dispatch(self, *args, **kwargs):
        return super(EintragDelete, self).dispatch(*args, **kwargs)
cee8341ee37a27bddc6bb669594ab3c522880752 | 11,688 | py | Python | pystiche_papers/li_wand_2016/_loss.py | pystiche/papers | 0d8179dc51f6eda0b27fa525dc0b86b866bc88e1 | [
"BSD-3-Clause"
] | 1 | 2021-09-30T09:30:07.000Z | 2021-09-30T09:30:07.000Z | pystiche_papers/li_wand_2016/_loss.py | pystiche/papers | 0d8179dc51f6eda0b27fa525dc0b86b866bc88e1 | [
"BSD-3-Clause"
] | 20 | 2021-10-10T13:37:25.000Z | 2022-03-31T07:31:45.000Z | pystiche_papers/li_wand_2016/_loss.py | pystiche/papers | 0d8179dc51f6eda0b27fa525dc0b86b866bc88e1 | [
"BSD-3-Clause"
] | null | null | null | from typing import Any, Optional, Tuple, Union
import torch
from torch.nn.functional import mse_loss
import pystiche
import pystiche.loss.functional as F
from pystiche import enc, loss
from pystiche_papers.utils import HyperParameters
from ._utils import (
extract_normalized_patches2d,
hyper_parameters as _hyper_parameters,
multi_layer_encoder as _multi_layer_encoder,
target_transforms as _target_transforms,
)
__all__ = [
"FeatureReconstructionLoss",
"content_loss",
"MRFLoss",
"style_loss",
"TotalVariationLoss",
"regularization",
"perceptual_loss",
]
class FeatureReconstructionLoss(loss.FeatureReconstructionLoss):
    r"""Feature reconstruction loss from :cite:`LW2016`.

    Args:
        encoder: Encoder used to encode the input.
        impl_params: If ``False``, calculate the score with the squared error (SE)
            instead of the mean squared error (MSE).
        **feature_reconstruction_loss_kwargs: Additional parameters of a
            :class:`pystiche.loss.FeatureReconstructionLoss`.

    .. seealso::

        :class:`pystiche.loss.FeatureReconstructionLoss`
    """

    # This copy of the class contained only the docstring; the behavior it
    # documents (the SE/MSE switch) was missing.  The methods below are
    # restored from the duplicate definition later in this file.
    def __init__(
        self,
        encoder: enc.Encoder,
        impl_params: bool = True,
        **feature_reconstruction_loss_kwargs: Any,
    ):
        super().__init__(encoder, **feature_reconstruction_loss_kwargs)

        # The reference implementation used nn.MSECriterion(), which defaults
        # to reduction="mean"; the paper describes a summed squared error.
        self.loss_reduction = "mean" if impl_params else "sum"

    def calculate_score(
        self,
        input_repr: torch.Tensor,
        target_repr: torch.Tensor,
        ctx: Optional[torch.Tensor],
    ) -> torch.Tensor:
        return mse_loss(input_repr, target_repr, reduction=self.loss_reduction)
def content_loss(
    impl_params: bool = True,
    multi_layer_encoder: Optional[enc.MultiLayerEncoder] = None,
    hyper_parameters: Optional[HyperParameters] = None,
) -> FeatureReconstructionLoss:
    r"""Content loss from :cite:`LW2016`.

    Args:
        impl_params: Switch the behavior and hyper-parameters between the reference
            implementation of the original authors and what is described in the paper.
            For details see :ref:`here <li_wand_2016-impl_params>`.
        multi_layer_encoder: Pretrained multi-layer encoder. Defaults to
            :func:`~pystiche_papers.li_wand_2016.multi_layer_encoder`.
        hyper_parameters: Hyper parameters. Defaults to
            :func:`~pystiche_papers.li_wand_2016.hyper_parameters`.

    .. seealso::

        :class:`pystiche_papers.li_wand_2016.FeatureReconstructionLoss`
    """
    mle = (
        multi_layer_encoder
        if multi_layer_encoder is not None
        else _multi_layer_encoder()
    )
    hp = (
        hyper_parameters
        if hyper_parameters is not None
        else _hyper_parameters(impl_params=impl_params)
    )
    return FeatureReconstructionLoss(
        mle.extract_encoder(hp.content_loss.layer),
        impl_params=impl_params,
        score_weight=hp.content_loss.score_weight,
    )
class MRFLoss(loss.MRFLoss):
    r"""MRF loss from :cite:`LW2016`.

    Args:
        encoder: Encoder used to encode the input.
        patch_size: Spatial size of the neural patches.
        impl_params: If ``True``, normalize the gradient of the neural patches. If
            ``False``, use a score correction factor of 1/2.
        **mrf_loss_kwargs: Additional parameters of a :class:`pystiche.loss.MRFLoss`.

    In contrast to :class:`pystiche.loss.MRFLoss`, the score is calculated with the
    squared error (SE) instead of the mean squared error (MSE).

    .. seealso::

        - :class:`pystiche.loss.MRFLoss`
        - :func:`pystiche_papers.li_wand_2016.extract_normalized_patches2d`
    """

    # This copy of the class contained only the docstring; the methods below
    # are restored from the duplicate definition later in this file.
    def __init__(
        self,
        encoder: enc.Encoder,
        patch_size: Union[int, Tuple[int, int]],
        impl_params: bool = True,
        **mrf_loss_kwargs: Any,
    ):
        super().__init__(encoder, patch_size, **mrf_loss_kwargs)

        # The reference implementation works on normalized patches instead of
        # the unnormalized patches described in the paper.
        self.normalize_patches_grad = impl_params
        self.loss_reduction = "sum"
        # Roughly speaking, since the calculation comprises a *squared*
        # distance we need a factor of 1/2 in the forward pass; in the
        # reference implementation this is invisible because it operates on
        # gradients directly.
        self.score_correction_factor = 1.0 / 2.0 if impl_params else 1.0

    def enc_to_repr(self, enc: torch.Tensor, is_guided: bool) -> torch.Tensor:
        if self.normalize_patches_grad:
            repr = extract_normalized_patches2d(enc, self.patch_size, self.stride)
        else:
            repr = pystiche.extract_patches2d(enc, self.patch_size, self.stride)
        if not is_guided:
            return repr

        return self._guide_repr(repr)

    def calculate_score(
        self,
        input_repr: torch.Tensor,
        target_repr: torch.Tensor,
        ctx: Optional[torch.Tensor],
    ) -> torch.Tensor:
        score = F.mrf_loss(
            input_repr, target_repr, reduction=self.loss_reduction, batched_input=True
        )
        return score * self.score_correction_factor
def style_loss(
    impl_params: bool = True,
    multi_layer_encoder: Optional[enc.MultiLayerEncoder] = None,
    hyper_parameters: Optional[HyperParameters] = None,
) -> loss.MultiLayerEncodingLoss:
    r"""Style loss from :cite:`LW2016`.

    Args:
        impl_params: Switch the behavior and hyper-parameters between the reference
            implementation of the original authors and what is described in the paper.
            For details see :ref:`here <li_wand_2016-impl_params>`.
        multi_layer_encoder: Pretrained multi-layer encoder. If
            omitted, :func:`~pystiche_papers.li_wand_2016.multi_layer_encoder` is used.
        hyper_parameters: Hyper parameters. If omitted,
            :func:`~pystiche_papers.li_wand_2016.hyper_parameters` is used.

    .. seealso::

        - :class:`pystiche_papers.li_wand_2016.MRFLoss`
    """
    if multi_layer_encoder is None:
        multi_layer_encoder = _multi_layer_encoder()

    if hyper_parameters is None:
        hyper_parameters = _hyper_parameters(impl_params=impl_params)

    def encoding_loss_fn(encoder: enc.Encoder, layer_weight: float) -> MRFLoss:
        # NOTE(review): this factory was referenced but missing (NameError at
        # runtime).  It is reconstructed from the MRFLoss signature and the
        # hyper-parameter names used elsewhere in this module — confirm the
        # keyword set against upstream pystiche_papers.
        return MRFLoss(
            encoder,
            hyper_parameters.style_loss.patch_size,
            impl_params=impl_params,
            target_transforms=_target_transforms(
                impl_params=impl_params, hyper_parameters=hyper_parameters
            ),
            stride=hyper_parameters.style_loss.stride,
            score_weight=layer_weight,
        )

    return loss.MultiLayerEncodingLoss(
        multi_layer_encoder,
        hyper_parameters.style_loss.layers,
        encoding_loss_fn,
        layer_weights=hyper_parameters.style_loss.layer_weights,
        score_weight=hyper_parameters.style_loss.score_weight,
    )
class TotalVariationLoss(loss.TotalVariationLoss):
    r"""Total variation loss from :cite:`LW2016`.

    Args:
        impl_params: If ``False``, use a score correction factor of 1/2.
        **total_variation_loss_kwargs: Additional parameters of a
            :class:`pystiche.loss.TotalVariationLoss`.

    In contrast to :class:`pystiche.loss.TotalVariationLoss`, the the score is
    calculated with the squared error (SE) instead of the mean squared error (MSE).

    .. seealso::

        - :class:`pystiche.loss.TotalVariationLoss`
    """

    # NOTE(review): the overrides implementing the documented SE scoring and
    # the 1/2 correction factor are not present in this copy of the class —
    # as written it behaves exactly like pystiche.loss.TotalVariationLoss.
    # Restore them from upstream pystiche_papers.
def regularization(
    impl_params: bool = True,
    hyper_parameters: Optional[HyperParameters] = None,
) -> TotalVariationLoss:
    r"""Regularization from :cite:`LW2016`.

    Args:
        impl_params: Switch the behavior and hyper-parameters between the reference
            implementation of the original authors and what is described in the paper.
            For details see :ref:`here <li_wand_2016-impl_params>`.
        hyper_parameters: Hyper parameters. Defaults to
            :func:`~pystiche_papers.li_wand_2016.hyper_parameters`.

    .. seealso::

        - :class:`pystiche_papers.li_wand_2016.TotalVariationLoss`
    """
    hp = _hyper_parameters() if hyper_parameters is None else hyper_parameters
    return TotalVariationLoss(
        impl_params=impl_params,
        score_weight=hp.regularization.score_weight,
    )
def perceptual_loss(
    impl_params: bool = True,
    multi_layer_encoder: Optional[enc.MultiLayerEncoder] = None,
    hyper_parameters: Optional[HyperParameters] = None,
) -> loss.PerceptualLoss:
    r"""Perceptual loss from :cite:`LW2016`.

    Combines the content loss, style loss, and regularization into a single
    :class:`pystiche.loss.PerceptualLoss`.

    Args:
        impl_params: Switch the behavior and hyper-parameters between the reference
            implementation of the original authors and what is described in the paper.
            For details see :ref:`here <li_wand_2016-impl_params>`.
        multi_layer_encoder: Pretrained multi-layer encoder. Defaults to
            :func:`~pystiche_papers.li_wand_2016.multi_layer_encoder`.
        hyper_parameters: Hyper parameters. Defaults to
            :func:`~pystiche_papers.li_wand_2016.hyper_parameters`.

    .. seealso::

        - :func:`pystiche_papers.li_wand_2016.content_loss`
        - :func:`pystiche_papers.li_wand_2016.style_loss`
        - :func:`pystiche_papers.li_wand_2016.regularization`
    """
    if multi_layer_encoder is None:
        multi_layer_encoder = _multi_layer_encoder()
    if hyper_parameters is None:
        hyper_parameters = _hyper_parameters()

    # The same switches are forwarded to every partial loss.
    common = dict(
        impl_params=impl_params,
        multi_layer_encoder=multi_layer_encoder,
        hyper_parameters=hyper_parameters,
    )
    return loss.PerceptualLoss(
        content_loss(**common),
        style_loss(**common),
        regularization(impl_params=impl_params, hyper_parameters=hyper_parameters),
    )
| 37.461538 | 110 | 0.693703 | from typing import Any, Optional, Tuple, Union
import torch
from torch.nn.functional import mse_loss
import pystiche
import pystiche.loss.functional as F
from pystiche import enc, loss
from pystiche_papers.utils import HyperParameters
from ._utils import (
extract_normalized_patches2d,
hyper_parameters as _hyper_parameters,
multi_layer_encoder as _multi_layer_encoder,
target_transforms as _target_transforms,
)
__all__ = [
"FeatureReconstructionLoss",
"content_loss",
"MRFLoss",
"style_loss",
"TotalVariationLoss",
"regularization",
"perceptual_loss",
]
class FeatureReconstructionLoss(loss.FeatureReconstructionLoss):
    r"""Feature reconstruction loss from :cite:`LW2016`.

    Args:
        encoder: Encoder used to encode the input.
        impl_params: If ``False``, calculate the score with the squared error (SE)
            instead of the mean squared error (MSE).
        **feature_reconstruction_loss_kwargs: Additional parameters of a
            :class:`pystiche.loss.FeatureReconstructionLoss`.

    .. seealso::

        :class:`pystiche.loss.FeatureReconstructionLoss`
    """

    def __init__(
        self,
        encoder: enc.Encoder,
        impl_params: bool = True,
        **feature_reconstruction_loss_kwargs: Any,
    ):
        super().__init__(encoder, **feature_reconstruction_loss_kwargs)

        # https://github.com/pmeier/CNNMRF/blob/fddcf4d01e2a6ce201059d8bc38597f74a09ba3f/mylib/content.lua#L15
        # nn.MSECriterion() was used as criterion to calculate the content loss, which
        # by default uses reduction="mean"
        self.loss_reduction = "mean" if impl_params else "sum"

    def calculate_score(
        self,
        input_repr: torch.Tensor,
        target_repr: torch.Tensor,
        ctx: Optional[torch.Tensor],
    ) -> torch.Tensor:
        # ``ctx`` is unused; the score is a plain (M)SE between representations.
        return mse_loss(input_repr, target_repr, reduction=self.loss_reduction)
def content_loss(
    impl_params: bool = True,
    multi_layer_encoder: Optional[enc.MultiLayerEncoder] = None,
    hyper_parameters: Optional[HyperParameters] = None,
) -> FeatureReconstructionLoss:
    r"""Content loss from :cite:`LW2016`.

    Args:
        impl_params: Switch the behavior and hyper-parameters between the reference
            implementation of the original authors and what is described in the paper.
            For details see :ref:`here <li_wand_2016-impl_params>`.
        multi_layer_encoder: Pretrained multi-layer encoder. If
            omitted, :func:`~pystiche_papers.li_wand_2016.multi_layer_encoder` is used.
        hyper_parameters: Hyper parameters. If omitted,
            :func:`~pystiche_papers.li_wand_2016.hyper_parameters` is used.

    .. seealso::

        :class:`pystiche_papers.li_wand_2016.FeatureReconstructionLoss`
    """
    # Fall back to the package defaults when not supplied explicitly.
    if multi_layer_encoder is None:
        multi_layer_encoder = _multi_layer_encoder()

    if hyper_parameters is None:
        hyper_parameters = _hyper_parameters(impl_params=impl_params)

    return FeatureReconstructionLoss(
        multi_layer_encoder.extract_encoder(hyper_parameters.content_loss.layer),
        impl_params=impl_params,
        score_weight=hyper_parameters.content_loss.score_weight,
    )
class MRFLoss(loss.MRFLoss):
    r"""MRF loss from :cite:`LW2016`.

    Args:
        encoder: Encoder used to encode the input.
        patch_size: Spatial size of the neural patches.
        impl_params: If ``True``, normalize the gradient of the neural patches. If
            ``False``, use a score correction factor of 1/2.
        **mrf_loss_kwargs: Additional parameters of a :class:`pystiche.loss.MRFLoss`.

    In contrast to :class:`pystiche.loss.MRFLoss`, the score is calculated with the
    squared error (SE) instead of the mean squared error (MSE).

    .. seealso::

        - :class:`pystiche.loss.MRFLoss`
        - :func:`pystiche_papers.li_wand_2016.extract_normalized_patches2d`
    """

    def __init__(
        self,
        encoder: enc.Encoder,
        patch_size: Union[int, Tuple[int, int]],
        impl_params: bool = True,
        **mrf_loss_kwargs: Any,
    ):
        super().__init__(encoder, patch_size, **mrf_loss_kwargs)

        # https://github.com/pmeier/CNNMRF/blob/fddcf4d01e2a6ce201059d8bc38597f74a09ba3f/mylib/mrf.lua#L221
        # https://github.com/pmeier/CNNMRF/blob/fddcf4d01e2a6ce201059d8bc38597f74a09ba3f/mylib/mrf.lua#L224
        # They use normalized patches instead of the unnormalized patches described in
        # the paper.
        self.normalize_patches_grad = impl_params
        self.loss_reduction = "sum"
        # The score correction factor is not visible in the reference implementation
        # of the original authors, since the calculation is performed with respect to
        # the gradient and not the score. Roughly speaking, since the calculation
        # comprises a *squared* distance, we need a factor of 1/2 in the forward pass.
        # https://github.com/pmeier/CNNMRF/blob/fddcf4d01e2a6ce201059d8bc38597f74a09ba3f/mylib/mrf.lua#L220
        self.score_correction_factor = 1.0 / 2.0 if impl_params else 1.0

    def enc_to_repr(self, enc: torch.Tensor, is_guided: bool) -> torch.Tensor:
        # Extract (optionally gradient-normalized) neural patches from the encoding.
        if self.normalize_patches_grad:
            repr = extract_normalized_patches2d(enc, self.patch_size, self.stride)
        else:
            repr = pystiche.extract_patches2d(enc, self.patch_size, self.stride)
        if not is_guided:
            return repr

        return self._guide_repr(repr)

    def calculate_score(
        self,
        input_repr: torch.Tensor,
        target_repr: torch.Tensor,
        ctx: Optional[torch.Tensor],
    ) -> torch.Tensor:
        score = F.mrf_loss(
            input_repr, target_repr, reduction=self.loss_reduction, batched_input=True
        )
        return score * self.score_correction_factor
def style_loss(
    impl_params: bool = True,
    multi_layer_encoder: Optional[enc.MultiLayerEncoder] = None,
    hyper_parameters: Optional[HyperParameters] = None,
) -> loss.MultiLayerEncodingLoss:
    r"""Style loss from :cite:`LW2016`.

    Args:
        impl_params: Switch the behavior and hyper-parameters between the reference
            implementation of the original authors and what is described in the paper.
            For details see :ref:`here <li_wand_2016-impl_params>`.
        multi_layer_encoder: Pretrained multi-layer encoder. If
            omitted, :func:`~pystiche_papers.li_wand_2016.multi_layer_encoder` is used.
        hyper_parameters: Hyper parameters. If omitted,
            :func:`~pystiche_papers.li_wand_2016.hyper_parameters` is used.

    .. seealso::

        - :class:`pystiche_papers.li_wand_2016.MRFLoss`
    """
    # Fall back to the paper's defaults for anything the caller did not supply.
    mle = (
        _multi_layer_encoder() if multi_layer_encoder is None else multi_layer_encoder
    )
    params = (
        _hyper_parameters(impl_params=impl_params)
        if hyper_parameters is None
        else hyper_parameters
    )

    def make_layer_loss(encoder: enc.Encoder, layer_weight: float) -> MRFLoss:
        # Called once per layer; each layer gets its own freshly constructed
        # set of target transforms.
        return MRFLoss(
            encoder,
            params.style_loss.patch_size,  # type: ignore[union-attr]
            impl_params=impl_params,
            stride=params.style_loss.stride,  # type: ignore[union-attr]
            target_transforms=_target_transforms(
                impl_params=impl_params, hyper_parameters=params
            ),
            score_weight=layer_weight,
        )

    return loss.MultiLayerEncodingLoss(
        mle,
        params.style_loss.layers,
        make_layer_loss,
        layer_weights=params.style_loss.layer_weights,
        score_weight=params.style_loss.score_weight,
    )
class TotalVariationLoss(loss.TotalVariationLoss):
    r"""Total variation loss from :cite:`LW2016`.

    Args:
        impl_params: If ``True``, apply a score correction factor of 1/2.
        **total_variation_loss_kwargs: Additional parameters of a
            :class:`pystiche.loss.TotalVariationLoss`.

    In contrast to :class:`pystiche.loss.TotalVariationLoss`, the score is
    calculated with the squared error (SE) instead of the mean squared error (MSE).

    .. seealso::

        - :class:`pystiche.loss.TotalVariationLoss`
    """

    def __init__(self, impl_params: bool = True, **total_variation_loss_kwargs: Any):
        super().__init__(**total_variation_loss_kwargs)
        self.loss_reduction = "sum"
        # The 1/2 is invisible in the reference implementation because the
        # computation there is done on the gradient rather than the score:
        # differentiating the *squared* distance cancels the factor. To match it
        # in a forward pass we need the 1/2 explicitly.
        # https://github.com/pmeier/CNNMRF/blob/fddcf4d01e2a6ce201059d8bc38597f74a09ba3f/mylib/tv.lua#L20-L30
        self.score_correction_factor = 1.0 / 2.0 if impl_params else 1.0

    def calculate_score(self, input_repr: torch.Tensor) -> torch.Tensor:
        raw_score = F.total_variation_loss(
            input_repr, exponent=self.exponent, reduction=self.loss_reduction
        )
        return raw_score * self.score_correction_factor
def regularization(
    impl_params: bool = True,
    hyper_parameters: Optional[HyperParameters] = None,
) -> TotalVariationLoss:
    r"""Regularization from :cite:`LW2016`.

    Args:
        impl_params: Switch the behavior and hyper-parameters between the reference
            implementation of the original authors and what is described in the paper.
            For details see :ref:`here <li_wand_2016-impl_params>`.
        hyper_parameters: Hyper parameters. If omitted,
            :func:`~pystiche_papers.li_wand_2016.hyper_parameters` is used.

    .. seealso::

        - :class:`pystiche_papers.li_wand_2016.TotalVariationLoss`
    """
    if hyper_parameters is None:
        # Forward impl_params so the default hyper-parameters match the requested
        # mode, consistent with content_loss() and style_loss(). Previously the
        # impl_params=True defaults were always used here despite the docstring.
        hyper_parameters = _hyper_parameters(impl_params=impl_params)

    return TotalVariationLoss(
        impl_params=impl_params,
        score_weight=hyper_parameters.regularization.score_weight,
    )
def perceptual_loss(
    impl_params: bool = True,
    multi_layer_encoder: Optional[enc.MultiLayerEncoder] = None,
    hyper_parameters: Optional[HyperParameters] = None,
) -> loss.PerceptualLoss:
    r"""Perceptual loss from :cite:`LW2016`.

    Combines the content loss, style loss, and total variation regularization
    into a single :class:`pystiche.loss.PerceptualLoss`.

    Args:
        impl_params: Switch the behavior and hyper-parameters between the reference
            implementation of the original authors and what is described in the paper.
            For details see :ref:`here <li_wand_2016-impl_params>`.
        multi_layer_encoder: Pretrained multi-layer encoder. If
            omitted, :func:`~pystiche_papers.li_wand_2016.multi_layer_encoder` is used.
        hyper_parameters: Hyper parameters. If omitted,
            :func:`~pystiche_papers.li_wand_2016.hyper_parameters` is used.

    .. seealso::

        - :func:`pystiche_papers.li_wand_2016.content_loss`
        - :func:`pystiche_papers.li_wand_2016.style_loss`
        - :func:`pystiche_papers.li_wand_2016.regularization`
    """
    if multi_layer_encoder is None:
        multi_layer_encoder = _multi_layer_encoder()
    if hyper_parameters is None:
        # Forward impl_params so the defaults reflect the requested mode.
        # Previously the impl_params=True defaults were built here and then
        # passed down, silently bypassing the impl_params-aware defaults of
        # content_loss() and style_loss().
        hyper_parameters = _hyper_parameters(impl_params=impl_params)

    return loss.PerceptualLoss(
        content_loss(
            impl_params=impl_params,
            multi_layer_encoder=multi_layer_encoder,
            hyper_parameters=hyper_parameters,
        ),
        style_loss(
            impl_params=impl_params,
            multi_layer_encoder=multi_layer_encoder,
            hyper_parameters=hyper_parameters,
        ),
        regularization(impl_params=impl_params, hyper_parameters=hyper_parameters),
    )
| 3,954 | 0 | 216 |
1b3186c99a60818dc9d24b438538877520aa1347 | 2,640 | py | Python | tests/conftest.py | Z2PackDev/bands_inspect | 76fdb0130d9ff64c738365a1911bc61f035927f2 | [
"Apache-2.0"
] | 1 | 2017-12-19T07:21:56.000Z | 2017-12-19T07:21:56.000Z | tests/conftest.py | Z2PackDev/bands-inspect | 76fdb0130d9ff64c738365a1911bc61f035927f2 | [
"Apache-2.0"
] | 3 | 2018-02-27T09:07:46.000Z | 2018-03-06T12:26:04.000Z | tests/conftest.py | Z2PackDev/bands_inspect | 76fdb0130d9ff64c738365a1911bc61f035927f2 | [
"Apache-2.0"
] | 1 | 2017-12-19T07:21:55.000Z | 2017-12-19T07:21:55.000Z | # -*- coding: utf-8 -*-
# (c) 2017-2019, ETH Zurich, Institut fuer Theoretische Physik
# Author: Dominik Gresch <greschd@gmx.ch>
"""
Configuration file for the pytest tests.
"""
import os
import json
import pytest
import numpy as np
import bands_inspect as bi
import parameters # pylint: disable=wrong-import-order
#--------------------------FIXTURES-------------------------------------#
@pytest.fixture
def test_name(request):
    """Return '<module name>/<test function name>' for the currently running test."""
    module_part = request.module.__name__
    function_part = request._parent_request._pyfuncitem.name  # pylint: disable=protected-access
    return '{}/{}'.format(module_part, function_part)
@pytest.fixture
def compare_data(request, test_name, scope="session"):  # pylint: disable=unused-argument,redefined-outer-name
    """Returns a function which either saves some data to a file or (if that file exists already) compares it to pre-existing data using a given comparison function."""

    # Bug fix: the body of ``inner`` was missing, so using this fixture raised
    # NameError on ``return inner``. Restored implementation below.
    def inner(compare_fct, data, tag=None):
        full_name = test_name + (tag or '')
        # Round-trip through JSON to get rid of json-specific quirks; stored as a
        # string because a custom decoder cannot be attached to the pytest cache.
        data_str = json.dumps(data)
        data = json.loads(data_str)
        val = json.loads(request.config.cache.get(full_name, 'null'))
        if val is None:
            # First run: persist the reference and fail so the test is re-run.
            request.config.cache.set(full_name, data_str)
            raise ValueError('Reference data does not exist.')
        assert compare_fct(val, data)

    return inner
@pytest.fixture
def compare_equal(compare_data):  # pylint: disable=redefined-outer-name
    """
    Returns a function which checks that a given data is equal to the stored reference.
    """

    def inner(data, tag=None):
        # Plain equality serves as the comparison function.
        return compare_data(lambda x, y: x == y, data, tag)

    return inner
@pytest.fixture
def assert_equal():
    """
    Returns a function which checks that two bands-inspect object instances are equal.
    """

    # Bug fix: the body of ``inner`` was missing, so using this fixture raised
    # NameError on ``return inner``. Restored implementation below.
    def inner(obj1, obj2):
        if isinstance(obj1, bi.kpoints.KpointsBase):
            np.testing.assert_equal(
                obj1.kpoints_explicit, obj2.kpoints_explicit
            )
        elif isinstance(obj1, bi.eigenvals.EigenvalsData):
            np.testing.assert_equal(
                obj1.kpoints.kpoints_explicit, obj2.kpoints.kpoints_explicit
            )
            np.testing.assert_equal(obj1.eigenvals, obj2.eigenvals)
        else:
            raise ValueError("Unknown type {}".format(type(obj1)))

    return inner
@pytest.fixture
def sample():
    """
    Returns the absolute path of the sample with a given name.
    """

    # Bug fix: the body of ``inner`` was missing, so using this fixture raised
    # NameError on ``return inner``. Restored implementation below.
    def inner(name):
        return os.path.join(parameters.SAMPLES_DIR, name)

    return inner
| 30.697674 | 168 | 0.659848 | # -*- coding: utf-8 -*-
# (c) 2017-2019, ETH Zurich, Institut fuer Theoretische Physik
# Author: Dominik Gresch <greschd@gmx.ch>
"""
Configuration file for the pytest tests.
"""
import os
import json
import pytest
import numpy as np
import bands_inspect as bi
import parameters # pylint: disable=wrong-import-order
#--------------------------FIXTURES-------------------------------------#
@pytest.fixture
def test_name(request):
    """Return '<module name>/<test function name>' for the currently running test."""
    module_part = request.module.__name__
    function_part = request._parent_request._pyfuncitem.name  # pylint: disable=protected-access
    return '{}/{}'.format(module_part, function_part)
@pytest.fixture
def compare_data(request, test_name, scope="session"):  # pylint: disable=unused-argument,redefined-outer-name
    """Return a helper that caches data under the test's name on the first run
    (then fails) and on later runs compares fresh data against the cached
    reference with a supplied comparison function."""

    def inner(compare_fct, data, tag=None):
        cache_key = test_name + (tag or '')
        # Round-trip through JSON so the fresh value carries the same
        # json-specific quirks as the cached copy; the reference is kept as a
        # string because a custom decoder cannot be attached to the pytest cache.
        serialized = json.dumps(data)
        normalized = json.loads(serialized)
        reference = json.loads(request.config.cache.get(cache_key, 'null'))
        if reference is None:
            # First run: persist the reference and fail so the test is re-run.
            request.config.cache.set(cache_key, serialized)
            raise ValueError('Reference data does not exist.')
        assert compare_fct(reference, normalized)

    return inner
@pytest.fixture
def compare_equal(compare_data):  # pylint: disable=redefined-outer-name
    """
    Returns a function which checks that a given data is equal to the stored reference.
    """

    def inner(data, tag=None):
        # Plain equality serves as the comparison function.
        return compare_data(lambda x, y: x == y, data, tag)

    return inner
@pytest.fixture
def assert_equal():
    """
    Returns a function which checks that two bands-inspect object instances are equal.
    """

    def inner(obj1, obj2):
        # Dispatch on the type of the first argument; check order matters, so
        # KpointsBase is tested before EigenvalsData, as in the original design.
        if isinstance(obj1, bi.kpoints.KpointsBase):
            np.testing.assert_equal(obj1.kpoints_explicit, obj2.kpoints_explicit)
            return
        if isinstance(obj1, bi.eigenvals.EigenvalsData):
            np.testing.assert_equal(
                obj1.kpoints.kpoints_explicit, obj2.kpoints.kpoints_explicit
            )
            np.testing.assert_equal(obj1.eigenvals, obj2.eigenvals)
            return
        raise ValueError("Unknown type {}".format(type(obj1)))

    return inner
@pytest.fixture
def sample():
    """
    Returns the absolute path of the sample with a given name.
    """

    def resolve(name):
        # Samples live in the directory configured by the parameters module.
        sample_path = os.path.join(parameters.SAMPLES_DIR, name)
        return sample_path

    return resolve
| 1,065 | 0 | 78 |
End of preview. Expand
in Dataset Viewer.
This dataset originated from bigcode/the-stack-dedup with some filters applied. The filters applied to this dataset are:
- remove_function_no_docstring
- remove_class_no_docstring
- remove_delete_markers
- Downloads last month
- 57